diff --git a/.forgejo/workflows/build-on-commit.yml b/.forgejo/workflows/build-on-commit.yml deleted file mode 100644 index e8f0d2e3..00000000 --- a/.forgejo/workflows/build-on-commit.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Build Docker Image on Commit - -on: - push: - branches: - - main - tags: - - '!' # Exclude tags - -jobs: - build-and-publish: - runs-on: docker-builder - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set REPO_VARS - id: repo-url - run: | - echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV - echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV - - - name: Login to OCI registry - run: | - echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin - - - name: Build and push Docker images - run: | - # Build Docker image with commit SHA - docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} . - docker push $REPO_HOST/$REPO_PATH:${{ github.sha }} - - # Build Docker image with nightly tag - docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly - docker push $REPO_HOST/$REPO_PATH:nightly - - # Remove local images to save storage - docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }} - docker rmi $REPO_HOST/$REPO_PATH:nightly diff --git a/.forgejo/workflows/build-on-tag.yml b/.forgejo/workflows/build-on-tag.yml deleted file mode 100644 index 888102b6..00000000 --- a/.forgejo/workflows/build-on-tag.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Build and Publish Docker Image on Tag - -on: - push: - tags: - - '*' - -jobs: - build-and-publish: - runs-on: docker-builder - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set REPO_VARS - id: repo-url - run: | - echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV - echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV - - - name: Login to OCI registry - run: | - echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin - - - name: Build and push Docker image - run: | - TAG=${{ github.ref_name }} # Get the tag name from the context - # Build and push multi-platform Docker images - docker build -t $REPO_HOST/$REPO_PATH:$TAG --push . 
- # Tag and push latest - docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest - docker push $REPO_HOST/$REPO_PATH:latest - - # Remove the local image to save storage - docker rmi $REPO_HOST/$REPO_PATH:$TAG - docker rmi $REPO_HOST/$REPO_PATH:latest \ No newline at end of file diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index aab991d5..00000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,10 +0,0 @@ -github: canewsin -patreon: # Replace with a single Patreon username e.g., user1 -open_collective: # Replace with a single Open Collective username e.g., user1 -ko_fi: canewsin -tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel -community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -liberapay: canewsin -issuehunt: # Replace with a single IssueHunt username e.g., user1 -otechie: # Replace with a single Otechie username e.g., user1 -custom: ['https://paypal.me/PramUkesh', 'https://zerolink.ml/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/'] diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 27b5c924..00000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,72 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ py3-latest ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ py3-latest ] - schedule: - - cron: '32 19 * * 2' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'javascript', 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - - # ℹ️ Command-line programs to run using the OS shell. 
- # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. - - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml deleted file mode 100644 index 2bdcaf95..00000000 --- a/.github/workflows/tests.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: tests - -on: [push, pull_request] - -jobs: - test: - runs-on: ubuntu-20.04 - strategy: - max-parallel: 16 - matrix: - python-version: ["3.7", "3.8", "3.9"] - - steps: - - name: Checkout ZeroNet - uses: actions/checkout@v2 - with: - submodules: "true" - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Prepare for installation - run: | - python3 -m pip install setuptools - python3 -m pip install --upgrade pip wheel - python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - - - name: Install - run: | - python3 -m pip install --upgrade -r requirements.txt - python3 -m pip list - - - name: Prepare for tests - run: | - openssl version -a - echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6 - - - name: Test - run: | - catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini - export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test - export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test - export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test - export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test - export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test - export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test - find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - flake8 . 
--count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ diff --git a/.gitignore b/.gitignore index 636cd115..b3795821 100644 --- a/.gitignore +++ b/.gitignore @@ -7,11 +7,8 @@ __pycache__/ # Hidden files .* -!/.forgejo -!/.github !/.gitignore !/.travis.yml -!/.gitlab-ci.yml # Temporary files *.bak @@ -31,6 +28,3 @@ tools/phantomjs # ZeroNet config file zeronet.conf - -# ZeroNet log files -log/* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index f3e1ed29..00000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,48 +0,0 @@ -stages: - - test - -.test_template: &test_template - stage: test - before_script: - - pip install --upgrade pip wheel - # Selenium and requests can't be installed without a requests hint on Python 3.4 - - pip install --upgrade requests>=2.22.0 - - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - - pip install --upgrade -r requirements.txt - script: - - pip list - - openssl version -a - - python -m pytest -x plugins/CryptMessage/Test --color=yes - - python -m pytest -x plugins/Bigfile/Test --color=yes - - python -m pytest -x plugins/AnnounceLocal/Test --color=yes - - python -m pytest -x plugins/OptionalManager/Test --color=yes - - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini --color=yes - - mv plugins/disabled-Multiuser plugins/Multiuser - - python -m pytest -x plugins/Multiuser/Test --color=yes - - mv plugins/disabled-Bootstrapper plugins/Bootstrapper - - python -m pytest -x plugins/Bootstrapper/Test --color=yes - - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ - -test:py3.4: - image: python:3.4.3 - <<: *test_template - -test:py3.5: - image: python:3.5.7 - <<: *test_template - -test:py3.6: - image: python:3.6.9 - <<: *test_template - -test:py3.7-openssl1.1.0: - image: python:3.7.0b5 - <<: *test_template - -test:py3.7-openssl1.1.1: - image: python:3.7.4 - <<: *test_template - -test:py3.8: - image: python:3.8.0b3 - <<: *test_template \ No newline at end of file diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 2c602a5a..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "plugins"] - path = plugins - url = https://github.com/ZeroNetX/ZeroNet-Plugins.git diff --git a/.travis.yml b/.travis.yml index bdaafa22..9af1d69b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,17 +4,19 @@ python: - 3.5 - 3.6 - 3.7 - - 3.8 + - 3.8-dev +dist: xenial services: - docker cache: pip before_install: - pip install --upgrade pip wheel - - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium + - pip install codecov coveralls flake8 mock pytest pytest-cov selenium # - docker build -t zeronet . 
# - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet install: - - pip install --upgrade -r requirements.txt + - if [[ $TRAVIS_PYTHON_VERSION == 3.8-dev ]]; then pip install setuptools cffi 'cython>=0.28' git+git://github.com/gevent/gevent.git#egg=gevent; fi + - pip install -r requirements.txt - pip list before_script: - openssl version -a @@ -24,19 +26,14 @@ before_script: sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6'; fi script: - - catchsegv python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini - - export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python -m pytest -x plugins/CryptMessage/Test - - export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python -m pytest -x plugins/Bigfile/Test - - export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python -m pytest -x plugins/AnnounceLocal/Test - - export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python -m pytest -x plugins/OptionalManager/Test - - export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test - - export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test - - find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - - find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ -after_failure: - - zip -r log.zip log/ - - curl --upload-file ./log.zip https://transfer.sh/log.zip + - python -m pytest -x plugins/CryptMessage/Test + - python -m pytest -x plugins/Bigfile/Test + - python -m pytest -x plugins/AnnounceLocal/Test + - python -m pytest -x plugins/OptionalManager/Test + - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini + - mv plugins/disabled-Multiuser plugins/Multiuser && python -m pytest -x plugins/Multiuser/Test + - mv plugins/disabled-Bootstrapper plugins/Bootstrapper && python -m pytest -x plugins/Bootstrapper/Test + - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pybitcointools/ after_success: - codecov - coveralls --rcfile=src/Test/coverage.ini diff --git a/CHANGELOG-zh-cn.md b/CHANGELOG-zh-cn.md new file mode 100644 index 00000000..c09ca401 --- /dev/null +++ b/CHANGELOG-zh-cn.md @@ -0,0 +1,134 @@ +## ZeroNet 0.5.1 (2016-11-18) +### 新增 +- 多语言界面 +- 新插件:为站点 HTML 与 JS 文件提供的翻译助手 +- 每个站点独立的 favicon + +### 修复 +- 并行可选文件下载 + +## ZeroNet 0.5.0 (2016-11-08) +### 新增 +- 新插件:允许在 ZeroHello 列出/删除/固定/管理文件 +- 新的 API 命令来关注用户的可选文件,与可选文件的请求统计 +- 新的可选文件总大小限制 +- 新插件:保存节点到数据库并在重启时保持它们,使得更快的可选文件搜索以及在没有 Tracker 的情况下工作 +- 重写 UPnP 端口打开器 + 退出时关闭端口(感谢 sirMackk!) 
+- 通过懒惰 PeerHashfield 创建来减少内存占用 +- 在 /Stats 页面加载 JSON 文件统计与数据库信息 + +### 更改 +- 独立的锁定文件来获得更好的 Windows 兼容性 +- 当执行 start.py 时,即使 ZeroNet 已经运行也打开浏览器 +- 在重载时保持插件顺序来允许插件扩展另一个插件 +- 只在完整加载 sites.json 时保存来避免数据丢失 +- 将更多的 Tracker 更改为更可靠的 Tracker +- 更少的 findhashid CPU 使用率 +- 合并下载大量可选文件 +- 更多对于可选文件的其他优化 +- 如果一个站点有 1000 个节点,更积极地清理 +- 为验证错误使用警告而不是错误 +- 首先推送更新到更新的客户端 +- 损坏文件重置改进 + +### 修复 +- 修复启动时出现的站点删除错误 +- 延迟 WebSocket 消息直到连接上 +- 修复如果文件包含额外数据时的数据库导入 +- 修复大站点下载 +- 修复 diff 发送 bug (跟踪它好长时间了) +- 修复当 JSON 文件包含 [] 字符时随机出现的发布错误 +- 修复 siteDelete 与 siteCreate bug +- 修复文件写入确认对话框 + + +## ZeroNet 0.4.1 (2016-09-05) +### 新增 +- 更快启动与更少内存使用的内核改变 +- 尝试连接丢失时重新连接 Tor +- 侧边栏滑入 +- 尝试避免不完整的数据文件被覆盖 +- 更快地打开数据库 +- 在侧边栏显示用户文件大小 +- 依赖 --connection_limit 的并发 worker 数量 + + +### 更改 +- 在空闲 5 分钟后关闭数据库 +- 更好的站点大小计算 +- 允许在域名中使用“-”符号 +- 总是尝试为站点保持连接 +- 移除已合并站点的合并权限 +- 只扫描最后 3 天的新闻源来加快数据库请求 +- 更新 ZeroBundle-win 到 Python 2.7.12 + + +### 修复 +- 修复重要的安全问题:允许任意用户无需有效的来自 ID 提供者的证书发布新内容,感谢 Kaffie 指出 +- 修复在没有选择提供证书提供者时的侧边栏错误 +- 在数据库重建时跳过无效文件 +- 修复随机弹出的 WebSocket 连接错误 +- 修复新的 siteCreate 命令 +- 修复站点大小计算 +- 修复计算机唤醒后的端口打开检查 +- 修复 --size_limit 的命令行解析 + + +## ZeroNet 0.4.0 (2016-08-11) +### 新增 +- 合并站点插件 +- Live source code reloading: Faster core development by allowing me to make changes in ZeroNet source code without restarting it. +- 为合并站点设计的新 JSON 表 +- 从侧边栏重建数据库 +- 允许直接在 JSON 表中存储自定义数据:更简单与快速的 SQL 查询 +- 用户文件存档:允许站点拥有者存档不活跃的用户内容到单个文件(减少初始同步的时间/CPU/内存使用率) +- 在文件删除时同时触发数据库 onUpdated/update +- 从 ZeroFrame API 请求权限 +- 允许使用 fileWrite API 命令在 content.json 存储额外数据 +- 更快的可选文件下载 +- 使用替代源 (Gogs, Gitlab) 来下载更新 +- Track provided sites/connection and prefer to keep the ones with more sites to reduce connection number + +### 更改 +- 保持每个站点至少 5 个连接 +- 将目标站点连接从 10 更改到 15 +- ZeroHello 搜索功能稳定性/速度改进 +- 提升机械硬盘下的客户端性能 + +### 修复 +- 修复 IE11 wrapper nonce 错误 +- 修复在移动设备上的侧边栏 +- 修复站点大小计算 +- 修复 IE10 兼容性 +- Windows XP ZeroBundle 兼容性(感谢中国人民) + + +## ZeroNet 0.3.7 (2016-05-27) +### 更改 +- 通过只传输补丁来减少带宽使用 +- 其他 CPU /内存优化 + + +## ZeroNet 0.3.6 (2016-05-27) +### 新增 +- 新的 ZeroHello +- Newsfeed 函数 + +### 修复 +- 安全性修复 + + +## ZeroNet 0.3.5 (2016-02-02) +### 新增 +- 带有 .onion 隐藏服务的完整 Tor 支持 +- 使用 ZeroNet 协议的 Bootstrap + +### 修复 +- 修复 Gevent 1.0.2 兼容性 + + +## ZeroNet 0.3.4 (2015-12-28) +### 新增 +- AES, ECIES API 函数支持 +- PushState 与 ReplaceState URL 通过 API 的操作支持 +- 多用户 localstorage diff --git a/CHANGELOG.md b/CHANGELOG.md index 6974d18a..225e424a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,201 +1,3 @@ -### ZeroNet 0.9.0 (2023-07-12) Rev4630 - - Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9 - - Add trackers to Config.py for failsafety incase missing trackers.txt - - Added Proxy links - - Fix pysha3 dep installation issue - - FileRequest -> Remove Unnecessary check, Fix error wording - - Fix Response when site is missing for `actionAs` - - -### ZeroNet 0.8.5 (2023-02-12) Rev4625 - - Fix(https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failed on Windows. - - default theme-class for missing value in `users.json`. - - Fetch Stats Plugin changes. - -### ZeroNet 0.8.4 (2022-12-12) Rev4620 - - Increase Minimum Site size to 25MB. 
- -### ZeroNet 0.8.3 (2022-12-11) Rev4611 - - main.py -> Fix accessing unassigned varible - - ContentManager -> Support for multiSig - - SiteStrorage.py -> Fix accessing unassigned varible - - ContentManager.py Improve Logging of Valid Signers - -### ZeroNet 0.8.2 (2022-11-01) Rev4610 - - Fix Startup Error when plugins dir missing - - Move trackers to seperate file & Add more trackers - - Config:: Skip loading missing tracker files - - Added documentation for getRandomPort fn - -### ZeroNet 0.8.1 (2022-10-01) Rev4600 - - fix readdress loop (cherry-pick previously added commit from conservancy) - - Remove Patreon badge - - Update README-ru.md (#177) - - Include inner_path of failed request for signing in error msg and response - - Don't Fail Silently When Cert is Not Selected - - Console Log Updates, Specify min supported ZeroNet version for Rust version Protocol Compatibility - - Update FUNDING.yml - -### ZeroNet 0.8.0 (2022-05-27) Rev4591 - - Revert File Open to catch File Access Errors. - -### ZeroNet 0.7.9-patch (2022-05-26) Rev4586 - - Use xescape(s) from zeronet-conservancy - - actionUpdate response Optimisation - - Fetch Plugins Repo Updates - - Fix Unhandled File Access Errors - - Create codeql-analysis.yml - -### ZeroNet 0.7.9 (2022-05-26) Rev4585 - - Rust Version Compatibility for update Protocol msg - - Removed Non Working Trakers. - - Dynamically Load Trackers from Dashboard Site. - - Tracker Supply Improvements. - - Fix Repo Url for Bug Report - - First Party Tracker Update Service using Dashboard Site. - - remove old v2 onion service [#158](https://github.com/ZeroNetX/ZeroNet/pull/158) - -### ZeroNet 0.7.8 (2022-03-02) Rev4580 - - Update Plugins with some bug fixes and Improvements - -### ZeroNet 0.7.6 (2022-01-12) Rev4565 - - Sync Plugin Updates - - Clean up tor v3 patch [#115](https://github.com/ZeroNetX/ZeroNet/pull/115) - - Add More Default Plugins to Repo - - Doubled Site Publish Limits - - Update ZeroNet Repo Urls [#103](https://github.com/ZeroNetX/ZeroNet/pull/103) - - UI/UX: Increases Size of Notifications Close Button [#106](https://github.com/ZeroNetX/ZeroNet/pull/106) - - Moved Plugins to Seperate Repo - - Added `access_key` variable in Config, this used to access restrited plugins when multiuser plugin is enabled. When MultiUserPlugin is enabled we cannot access some pages like /Stats, this key will remove such restriction with access key. - - Added `last_connection_id_current_version` to ConnectionServer, helpful to estimate no of connection from current client version. - - Added current version: connections to /Stats page. see the previous point. - -### ZeroNet 0.7.5 (2021-11-28) Rev4560 - - Add more default trackers - - Change default homepage address to `1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d` - - Change default update site address to `1Update8crprmciJHwp2WXqkx2c4iYp18` - -### ZeroNet 0.7.3 (2021-11-28) Rev4555 - - Fix xrange is undefined error - - Fix Incorrect viewport on mobile while loading - - Tor-V3 Patch by anonymoose - - -### ZeroNet 0.7.1 (2019-07-01) Rev4206 -### Added - - Built-in logging console in the web UI to see what's happening in the background. (pull down top-right 0 button to see it) - - Display database rebuild errors [Thanks to Lola] - - New plugin system that allows to install and manage builtin/third party extensions to the ZeroNet client using the web interface. 
- - Support multiple trackers_file - - Add OpenSSL 1.1 support to CryptMessage plugin based on Bitmessage modifications [Thanks to radfish] - - Display visual error message on startup errors - - Fix max opened files changing on Windows platform - - Display TLS1.3 compatibility on /Stats page - - Add fake SNI and ALPN to peer connections to make it more like standard https connections - - Hide and ignore tracker_proxy setting in Tor: Always mode as it's going to use Tor anyway. - - Deny websocket connections from unknown origins - - Restrict open_browser values to avoid RCE on sandbox escape - - Offer access web interface by IP address in case of unknown host - - Link to site's sidebar with "#ZeroNet:OpenSidebar" hash - -### Changed - - Allow .. in file names [Thanks to imachug] - - Change unstable trackers - - More clean errors on sites.json/users.json load error - - Various tweaks for tracker rating on unstable connections - - Use OpenSSL 1.1 dlls from default Python Windows distribution if possible - - Re-factor domain resolving for easier domain plugins - - Disable UDP connections if --proxy is used - - New, decorator-based Websocket API permission system to avoid future typo mistakes - -### Fixed - - Fix parsing config lines that have no value - - Fix start.py [Thanks to imachug] - - Allow multiple values of the same key in the config file [Thanks ssdifnskdjfnsdjk for reporting] - - Fix parsing config file lines that has % in the value [Thanks slrslr for reporting] - - Fix bootstrapper plugin hash reloads [Thanks geekless for reporting] - - Fix CryptMessage plugin OpenSSL dll loading on Windows (ZeroMail errors) [Thanks cxgreat2014 for reporting] - - Fix startup error when using OpenSSL 1.1 [Thanks to imachug] - - Fix a bug that did not loaded merged site data for 5 sec after the merged site got added - - Fix typo that allowed to add new plugins in public proxy mode. [Thanks styromaniac for reporting] - - Fix loading non-big files with "|all" postfix [Thanks to krzotr] - - Fix OpenSSL cert generation error crash by change Windows console encoding to utf8 - -#### Wrapper html injection vulnerability [Reported by ivanq] - -In ZeroNet before rev4188 the wrapper template variables was rendered incorrectly. - -Result: The opened site was able to gain WebSocket connection with unrestricted ADMIN/NOSANDBOX access, change configuration values and possible RCE on client's machine. - -Fix: Fixed the template rendering code, disallowed WebSocket connections from unknown locations, restricted open_browser configuration values to avoid possible RCE in case of sandbox escape. - -Note: The fix is also back ported to ZeroNet Py 2.x version (Rev3870) - - -### ZeroNet 0.7.0 (2019-06-12) Rev4106 (First release targeting Python 3.4+) -### Added - - 5-10x faster signature verification by using libsecp256k1 (Thanks to ZeroMux) - - Generated SSL certificate randomization to avoid protocol filters (Thanks to ValdikSS) - - Offline mode - - P2P source code update using ZeroNet protocol - - ecdsaSign/Verify commands to CryptMessage plugin (Thanks to imachug) - - Efficient file rename: change file names instead of re-downloading the file. 
- - Make redirect optional on site cloning (Thanks to Lola) - - EccPrivToPub / EccPubToPriv functions (Thanks to imachug) - - Detect and change dark/light theme based on OS setting (Thanks to filips123) - -### Changed - - Re-factored code to Python3 runtime (compatible with Python 3.4-3.8) - - More safe database sync mode - - Removed bundled third-party libraries where it's possible - - Use lang=en instead of lang={lang} in urls to avoid url encode problems - - Remove environment details from error page - - Don't push content.json updates larger than 10kb to significantly reduce bw usage for site with many files - -### Fixed - - Fix sending files with \0 characters - - Security fix: Escape error detail to avoid XSS (reported by krzotr) - - Fix signature verification using libsecp256k1 for compressed addresses (mostly certificates generated in the browser) - - Fix newsfeed if you have more than 1000 followed topic/post on one site. - - Fix site download as zip file - - Fix displaying sites with utf8 title - - Error message if dbRebuild fails (Thanks to Lola) - - Fix browser reopen if executing start.py again. (Thanks to imachug) - - -### ZeroNet 0.6.5 (2019-02-16) Rev3851 (Last release targeting Python 2.7.x) -### Added - - IPv6 support in peer exchange, bigfiles, optional file finding, tracker sharing, socket listening and connecting (based on tangdou1 modifications) - - New tracker database format with IPv6 support - - Display notification if there is an unpublished modification for your site - - Listen and shut down normally for SIGTERM (Thanks to blurHY) - - Support tilde `~` in filenames (by d14na) - - Support map for Namecoin subdomain names (Thanks to lola) - - Add log level to config page - - Support `{data}` for data dir variable in trackers_file value - - Quick check content.db on startup and rebuild if necessary - - Don't show meek proxy option if the tor client does not supports it - -### Changed - - Refactored port open checking with IPv6 support - - Consider non-local IPs as external even is the open port check fails (for CJDNS and Yggdrasil support) - - Add IPv6 tracker and change unstable tracker - - Don't correct sent local time with the calculated time correction - - Disable CSP for Edge - - Only support CREATE commands in dbschema indexes node and SELECT from storage.query - -### Fixed - - Check the length of master seed when executing cryptGetPrivatekey CLI command - - Only reload source code on file modification / creation - - Detection and issue warning for latest no-script plugin - - Fix atomic write of a non-existent file - - Fix sql queries with lots of variables and sites with lots of content.json - - Fix multi-line parsing of zeronet.conf - - Fix site deletion from users.json - - Fix site cloning before site downloaded (Reported by unsystemizer) - - Fix queryJson for non-list nodes (Reported by MingchenZhang) - - ## ZeroNet 0.6.4 (2018-10-20) Rev3660 ### Added - New plugin: UiConfig. A web interface that allows changing ZeroNet settings. diff --git a/COPYING b/COPYING deleted file mode 100644 index f288702d..00000000 --- a/COPYING +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. 
- - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. 
Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. 
For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. 
- - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. 
You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. 
- - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. 
- - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. 
- - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. 
- - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". 
- - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/Dockerfile b/Dockerfile index 3f1d3c18..75d18a37 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,33 +1,27 @@ -FROM alpine:3.15 +FROM alpine:3.8 #Base settings ENV HOME /root -COPY requirements.txt /root/requirements.txt - -#Install ZeroNet -RUN apk --update --no-cache --no-progress add python3 python3-dev py3-pip gcc g++ autoconf automake libtool libffi-dev musl-dev make tor openssl \ - && pip3 install -r /root/requirements.txt \ - && apk del python3-dev gcc g++ autoconf automake libtool libffi-dev musl-dev make \ - && echo "ControlPort 9051" >> /etc/tor/torrc \ - && echo "CookieAuthentication 1" >> /etc/tor/torrc - -RUN python3 -V \ - && python3 -m pip list \ - && tor --version \ - && openssl version - #Add Zeronet source COPY . /root + +#Install ZeroNet +RUN apk --no-cache --no-progress add python3 python3-dev gcc libffi-dev musl-dev make tor openssl \ + && pip3 install -r /root/requirements.txt \ + && apk del python3-dev gcc libffi-dev musl-dev make \ + && echo "ControlPort 9051" >> /etc/tor/torrc \ + && echo "CookieAuthentication 1" >> /etc/tor/torrc + VOLUME /root/data #Control if Tor proxy is started -ENV ENABLE_TOR true +ENV ENABLE_TOR false WORKDIR /root #Set upstart command -CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26117 +CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552 #Expose ports -EXPOSE 43110 26117 +EXPOSE 43110 26552 diff --git a/Dockerfile.arm64v8 b/Dockerfile.arm64v8 deleted file mode 100644 index d27b7620..00000000 --- a/Dockerfile.arm64v8 +++ /dev/null @@ -1,34 +0,0 @@ -FROM alpine:3.12 - -#Base settings -ENV HOME /root - -COPY requirements.txt /root/requirements.txt - -#Install ZeroNet -RUN apk --update --no-cache --no-progress add python3 python3-dev gcc libffi-dev musl-dev make tor openssl \ - && pip3 install -r /root/requirements.txt \ - && apk del python3-dev gcc libffi-dev musl-dev make \ - && echo "ControlPort 9051" >> /etc/tor/torrc \ - && echo "CookieAuthentication 1" >> /etc/tor/torrc - -RUN python3 -V \ - && python3 -m pip list \ - && tor --version \ - && openssl version - -#Add Zeronet source -COPY . /root -VOLUME /root/data - -#Control if Tor proxy is started -ENV ENABLE_TOR false - -WORKDIR /root - -#Set upstart command -CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552 - -#Expose ports -EXPOSE 43110 26552 - diff --git a/LICENSE b/LICENSE index 0d17b72d..d6a93266 100644 --- a/LICENSE +++ b/LICENSE @@ -1,27 +1,340 @@ -This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, version 3. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. 
- -You should have received a copy of the GNU General Public License -along with this program. If not, see . - - -Additional Conditions : - -Contributing to this repo - This repo is governed by GPLv3, same is located at the root of the ZeroNet git repo, - unless specified separately all code is governed by that license, contributions to this repo - are divided into two key types, key contributions and non-key contributions, key contributions - are which, directly affects the code performance, quality and features of software, - non key contributions include things like translation datasets, image, graphic or video - contributions that does not affect the main usability of software but improves the existing - usability of certain thing or feature, these also include tests written with code, since their - purpose is to check, whether something is working or not as intended. All the non-key contributions - are governed by [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/), unless specified - above, a contribution is ruled by the type of contribution if there is a conflict between two - contributing parties of repo in any case. +GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. 
If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. 
(Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. 
Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. 
The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + {description} + Copyright (C) {year} {fullname} + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + {signature of Ty Coon}, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. + diff --git a/README-ru.md b/README-ru.md index 7d557727..75abbfab 100644 --- a/README-ru.md +++ b/README-ru.md @@ -1,133 +1,211 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) [简体中文](./README-zh-cn.md) [English](./README.md) -Децентрализованные вебсайты, использующие криптографию Bitcoin и протокол BitTorrent — https://zeronet.dev ([Зеркало в ZeroNet](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/)). В отличии от Bitcoin, ZeroNet'у не требуется блокчейн для работы, однако он использует ту же криптографию, чтобы обеспечить сохранность и проверку данных. +Децентрализованные вебсайты использующие Bitcoin криптографию и BitTorrent сеть - https://zeronet.io + ## Зачем? -- Мы верим в открытую, свободную, и неподдающуюся цензуре сеть и связь. -- Нет единой точки отказа: Сайт остаётся онлайн, пока его обслуживает хотя бы 1 пир. 
-- Нет затрат на хостинг: Сайты обслуживаются посетителями. -- Невозможно отключить: Он нигде, потому что он везде. -- Скорость и возможность работать без Интернета: Вы сможете получить доступ к сайту, потому что его копия хранится на вашем компьютере и у ваших пиров. +* Мы верим в открытую, свободную, и не отцензуренную сеть и коммуникацию. +* Нет единой точки отказа: Сайт онлайн пока по крайней мере 1 пир обслуживает его. +* Никаких затрат на хостинг: Сайты обслуживаются посетителями. +* Невозможно отключить: Он нигде, потому что он везде. +* Быстр и работает оффлайн: Вы можете получить доступ к сайту, даже если Интернет недоступен. + ## Особенности + * Обновляемые в реальном времени сайты + * Поддержка Namecoin .bit доменов + * Лёгок в установке: распаковал & запустил + * Клонирование вебсайтов в один клик + * Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) + based authorization: Ваша учетная запись защищена той же криптографией, что и ваш Bitcoin-кошелек + * Встроенный SQL-сервер с синхронизацией данных P2P: Позволяет упростить разработку сайта и ускорить загрузку страницы + * Анонимность: Полная поддержка сети Tor с помощью скрытых служб .onion вместо адресов IPv4 + * TLS зашифрованные связи + * Автоматическое открытие uPnP порта + * Плагин для поддержки многопользовательской (openproxy) + * Работает с любыми браузерами и операционными системами -- Обновление сайтов в реальном времени -- Поддержка доменов `.bit` ([Namecoin](https://www.namecoin.org)) -- Легкая установка: просто распакуйте и запустите -- Клонирование сайтов "в один клик" -- Беспарольная [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - авторизация: Ваша учетная запись защищена той же криптографией, что и ваш Bitcoin-кошелек -- Встроенный SQL-сервер с синхронизацией данных P2P: Позволяет упростить разработку сайта и ускорить загрузку страницы -- Анонимность: Полная поддержка сети Tor, используя скрытые службы `.onion` вместо адресов IPv4 -- Зашифрованное TLS подключение -- Автоматическое открытие UPnP–порта -- Плагин для поддержки нескольких пользователей (openproxy) -- Работа с любыми браузерами и операционными системами - -## Текущие ограничения - -- Файловые транзакции не сжаты -- Нет приватных сайтов ## Как это работает? -- После запуска `zeronet.py` вы сможете посещать сайты в ZeroNet, используя адрес - `http://127.0.0.1:43110/{zeronet_адрес}` - (Например: `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). -- Когда вы посещаете новый сайт в ZeroNet, он пытается найти пиров с помощью протокола BitTorrent, - чтобы скачать у них файлы сайта (HTML, CSS, JS и т.д.). -- После посещения сайта вы тоже становитесь его пиром. -- Каждый сайт содержит файл `content.json`, который содержит SHA512 хеши всех остальные файлы - и подпись, созданную с помощью закрытого ключа сайта. -- Если владелец сайта (тот, кто владеет закрытым ключом для адреса сайта) изменяет сайт, он +* После запуска `zeronet.py` вы сможете посетить зайты (zeronet сайты) используя адрес + `http://127.0.0.1:43110/{zeronet_address}` +(например. `http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`). +* Когда вы посещаете новый сайт zeronet, он пытается найти пиров с помощью BitTorrent + чтобы загрузить файлы сайтов (html, css, js ...) из них. +* Каждый посещенный зайт также обслуживается вами. (Т.е хранится у вас на компьютере) +* Каждый сайт содержит файл `content.json`, который содержит все остальные файлы в хэше sha512 + и подпись, созданную с использованием частного ключа сайта. 
+* Если владелец сайта (у которого есть закрытый ключ для адреса сайта) изменяет сайт, то он/она подписывает новый `content.json` и публикует его для пиров. После этого пиры проверяют целостность `content.json` - (используя подпись), скачвают изменённые файлы и распространяют новый контент для других пиров. + (используя подпись), они загружают измененные файлы и публикуют новый контент для других пиров. + +#### [Слайд-шоу о криптографии ZeroNet, обновлениях сайтов, многопользовательских сайтах »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) +#### [Часто задаваемые вопросы »](https://zeronet.io/docs/faq/) + +#### [Документация разработчика ZeroNet »](https://zeronet.io/docs/site_development/getting_started/) -[Презентация о криптографии ZeroNet, обновлениях сайтов, многопользовательских сайтах »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -[Часто задаваемые вопросы »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) -[Документация разработчика ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## Скриншоты ![Screenshot](https://i.imgur.com/H60OAHY.png) ![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -[Больше скриншотов в документации ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) -## Как присоединиться? +#### [Больше скриншотов в ZeroNet документации »](https://zeronet.io/docs/using_zeronet/sample_sites/) -### Windows -- Скачайте и распакуйте архив [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26МБ) -- Запустите `ZeroNet.exe` +## Как вступить -### macOS +* Скачайте ZeroBundle пакет: + * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) + * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) + * [Linux 64-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) + * [Linux 32-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) +* Распакуйте где угодно +* Запустите `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) -- Скачайте и распакуйте архив [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14МБ) -- Запустите `ZeroNet.app` +### Linux терминал -### Linux (64 бит) +* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` +* `tar xvpfz ZeroBundle-linux64.tar.gz` +* `cd ZeroBundle` +* Запустите с помощью `./ZeroNet.sh` -- Скачайте и распакуйте архив [ZeroNet-linux.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip) (14МБ) -- Запустите `./ZeroNet.sh` +Он загружает последнюю версию ZeroNet, затем запускает её автоматически. -> **Note** -> Запустите таким образом: `./ZeroNet.sh --ui_ip '*' --ui_restrict ваш_ip_адрес`, чтобы разрешить удалённое подключение к веб–интерфейсу. +#### Ручная установка для Debian Linux -### Docker +* `sudo apt-get update` +* `sudo apt-get install msgpack-python python-gevent` +* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` +* `tar xvpfz master.tar.gz` +* `cd ZeroNet-master` +* Запустите с помощью `python2 zeronet.py` +* Откройте http://127.0.0.1:43110/ в вашем браузере. 
-Официальный образ находится здесь: https://hub.docker.com/r/canewsin/zeronet/ +### [Arch Linux](https://www.archlinux.org) -### Android (arm, arm64, x86) +* `git clone https://aur.archlinux.org/zeronet.git` +* `cd zeronet` +* `makepkg -srci` +* `systemctl start zeronet` +* Откройте http://127.0.0.1:43110/ в вашем браузере. -- Для работы требуется Android как минимум версии 5.0 Lollipop -- [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) -- Скачать APK: https://github.com/canewsin/zeronet_mobile/releases +Смотрите [ArchWiki](https://wiki.archlinux.org)'s [ZeroNet +article](https://wiki.archlinux.org/index.php/ZeroNet) для дальнейшей помощи. -### Android (arm, arm64, x86) Облегчённый клиент только для просмотра (1МБ) +### [Gentoo Linux](https://www.gentoo.org) -- Для работы требуется Android как минимум версии 4.1 Jelly Bean -- [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) +* [`layman -a raiagent`](https://github.com/leycec/raiagent) +* `echo '>=net-vpn/zeronet-0.5.4' >> /etc/portage/package.accept_keywords` +* *(Опционально)* Включить поддержку Tor: `echo 'net-vpn/zeronet tor' >> + /etc/portage/package.use` +* `emerge zeronet` +* `rc-service zeronet start` +* Откройте http://127.0.0.1:43110/ в вашем браузере. -### Установка из исходного кода +Смотрите `/usr/share/doc/zeronet-*/README.gentoo.bz2` для дальнейшей помощи. -```sh -wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip -unzip ZeroNet-src.zip -cd ZeroNet -sudo apt-get update -sudo apt-get install python3-pip -sudo python3 -m pip install -r requirements.txt +### [FreeBSD](https://www.freebsd.org/) + +* `pkg install zeronet` or `cd /usr/ports/security/zeronet/ && make install clean` +* `sysrc zeronet_enable="YES"` +* `service zeronet start` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +### [Vagrant](https://www.vagrantup.com/) + +* `vagrant up` +* Подключитесь к VM с помощью `vagrant ssh` +* `cd /vagrant` +* Запустите `python2 zeronet.py --ui_ip 0.0.0.0` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +### [Docker](https://www.docker.com/) +* `docker run -d -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` +* Это изображение Docker включает в себя прокси-сервер Tor, который по умолчанию отключён. + Остерегайтесь что некоторые хостинг-провайдеры могут не позволить вам запускать Tor на своих серверах. + Если вы хотите включить его,установите переменную среды `ENABLE_TOR` в` true` (по умолчанию: `false`) Например: + + `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/) + +* `virtualenv env` +* `source env/bin/activate` +* `pip install msgpack gevent` +* `python2 zeronet.py` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +## Текущие ограничения + +* ~~Нет torrent-похожего файла разделения для поддержки больших файлов~~ (поддержка больших файлов добавлена) +* ~~Не анонимнее чем Bittorrent~~ (добавлена встроенная поддержка Tor) +* Файловые транзакции не сжаты ~~ или незашифрованы еще ~~ (добавлено шифрование TLS) +* Нет приватных сайтов + + +## Как я могу создать сайт в Zeronet? + +Завершите работу zeronet, если он запущен + +```bash +$ zeronet.py siteCreate +... 
+- Site private key (Приватный ключ сайта): 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq +- Site address (Адрес сайта): 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +- Site created! (Сайт создан) +$ zeronet.py +... ``` -- Запустите `python3 zeronet.py` -Откройте приветственную страницу ZeroHello в вашем браузере по ссылке http://127.0.0.1:43110/ +Поздравляем, вы закончили! Теперь каждый может получить доступ к вашему зайту используя +`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` -## Как мне создать сайт в ZeroNet? +Следующие шаги: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/) -- Кликните на **⋮** > **"Create new, empty site"** в меню на сайте [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d). -- Вы будете **перенаправлены** на совершенно новый сайт, который может быть изменён только вами! -- Вы можете найти и изменить контент вашего сайта в каталоге **data/[адрес_вашего_сайта]** -- После изменений откройте ваш сайт, переключите влево кнопку "0" в правом верхнем углу, затем нажмите кнопки **sign** и **publish** внизу -Следующие шаги: [Документация разработчика ZeroNet](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) +## Как я могу модифицировать Zeronet сайт? + +* Измените файлы расположенные в data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 директории. + Когда закончите с изменением: + +```bash +$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +- Signing site (Подпись сайта): 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2... +Private key (Приватный ключ) (input hidden): +``` + +* Введите секретный ключ, который вы получили при создании сайта, потом: + +```bash +$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +Site:13DNDk..bhC2 Publishing to 3/10 peers... +Site:13DNDk..bhC2 Successfuly published to 3 peers +- Serving files.... +``` + +* Вот и всё! Вы успешно подписали и опубликовали свои изменения. + ## Поддержите проект -- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Рекомендуем) -- LiberaPay: https://liberapay.com/PramUkesh -- Paypal: https://paypal.me/PramUkesh -- Другие способы: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) +- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX +- Paypal: https://zeronet.io/docs/help_zeronet/donate/ + +### Спонсоры + +* Улучшенная совместимость с MacOS / Safari стала возможной благодаря [BrowserStack.com](https://www.browserstack.com) #### Спасибо! 
-- Здесь вы можете получить больше информации, помощь, прочитать список изменений и исследовать ZeroNet сайты: https://www.reddit.com/r/zeronetx/ -- Общение происходит на канале [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) или в [Gitter](https://gitter.im/canewsin/ZeroNet) -- Электронная почта: canews.in@gmail.com +* Больше информации, помощь, журнал изменений, zeronet сайты: https://www.reddit.com/r/zeronet/ +* Приходите, пообщайтесь с нами: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) или на [gitter](https://gitter.im/HelloZeroNet/ZeroNet) +* Email: hello@zeronet.io (PGP: CB9613AE) diff --git a/README-zh-cn.md b/README-zh-cn.md index 37095ff6..103194ea 100644 --- a/README-zh-cn.md +++ b/README-zh-cn.md @@ -1,49 +1,51 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) [English](./README.md) -使用 Bitcoin 加密和 BitTorrent 网络的去中心化网络 - https://zeronet.dev +使用 Bitcoin 加密和 BitTorrent 网络的去中心化网络 - https://zeronet.io -## 为什么? +## 为什么? -* 我们相信开放,自由,无审查的网络和通讯 +* 我们相信开放,自由,无审查的网络 * 不会受单点故障影响:只要有在线的节点,站点就会保持在线 -* 无托管费用:站点由访问者托管 -* 无法关闭:因为节点无处不在 -* 快速并可离线运行:即使没有互联网连接也可以使用 +* 无托管费用: 站点由访问者托管 +* 无法关闭: 因为节点无处不在 +* 快速并可离线运行: 即使没有互联网连接也可以使用 ## 功能 * 实时站点更新 * 支持 Namecoin 的 .bit 域名 - * 安装方便:只需解压并运行 + * 安装方便: 只需解压并运行 * 一键克隆存在的站点 - * 无需密码、基于 [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - 的认证:您的账户被与比特币钱包相同的加密方法保护 - * 内建 SQL 服务器和 P2P 数据同步:让开发更简单并提升加载速度 - * 匿名性:完整的 Tor 网络支持,支持通过 .onion 隐藏服务相互连接而不是通过 IPv4 地址连接 + * 无需密码、基于 [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) 的认证:用与比特币钱包相同的加密方法用来保护你的账户 +你的账户被使用和比特币钱包相同的加密方法 + * 内建 SQL 服务器和 P2P 数据同步: 让开发更简单并提升加载速度 + * 匿名性: 完整的 Tor 网络支持,支持通过 .onion 隐藏服务相互连接而不是通过IPv4地址连接 * TLS 加密连接 * 自动打开 uPnP 端口 - * 多用户(openproxy)支持的插件 - * 适用于任何浏览器 / 操作系统 + * 插件和多用户 (开放式代理) 支持 + * 全平台兼容 ## 原理 -* 在运行 `zeronet.py` 后,您将可以通过 - `http://127.0.0.1:43110/{zeronet_address}`(例如: - `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`)访问 zeronet 中的站点 -* 在您浏览 zeronet 站点时,客户端会尝试通过 BitTorrent 网络来寻找可用的节点,从而下载需要的文件(html,css,js...) -* 您将会储存每一个浏览过的站点 -* 每个站点都包含一个名为 `content.json` 的文件,它储存了其他所有文件的 sha512 散列值以及一个通过站点私钥生成的签名 -* 如果站点的所有者(拥有站点地址的私钥)修改了站点,并且他 / 她签名了新的 `content.json` 然后推送至其他节点, - 那么这些节点将会在使用签名验证 `content.json` 的真实性后,下载修改后的文件并将新内容推送至另外的节点 +* 在你运行`zeronet.py`后你将可以通过`http://127.0.0.1:43110/{zeronet_address}` (比如. 
+`http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`)。访问 zeronet 中的站点。 -#### [关于 ZeroNet 加密,站点更新,多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [常见问题 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) +* 在你浏览 zeronet 站点时,客户端会尝试通过 BitTorrent 网络来寻找可用的节点,从而下载需要的文件 (html, css, js...) -#### [ZeroNet 开发者文档 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) +* 你将会储存每一个浏览过的站点 +* 每个站点都包含一个名为 `content.json` ,它储存了其他所有文件的 sha512 hash 值 + 和一个通过站点私钥建立的签名 +* 如果站点的所有者 (拥有私钥的那个人) 修改了站点, 并且他/她签名了新的 `content.json` 然后推送至其他节点, +那么所有节点将会在验证 `content.json` 的真实性 (使用签名)后, 下载修改后的文件并推送至其他节点。 + +#### [有关于 ZeroNet 加密, 站点更新, 多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1qBxkroB_iiX2zHEn0dt-N-qRZgyEzui46XS2hEa3AA4/pub?start=false&loop=false&delayms=3000) +#### [常见问题 »](https://zeronet.io/docs/faq/) + +#### [ZeroNet开发者文档 »](https://zeronet.io/docs/site_development/getting_started/) ## 屏幕截图 @@ -51,82 +53,136 @@ ![Screenshot](https://i.imgur.com/H60OAHY.png) ![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -#### [ZeroNet 文档中的更多屏幕截图 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) +#### [在 ZeroNet 文档里查看更多的屏幕截图 »](https://zeronet.io/docs/using_zeronet/sample_sites/) -## 如何加入 +## 如何加入 ? -### Windows +* 下载 ZeroBundle 文件包: + * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) + * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) + * [Linux 64bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) + * [Linux 32bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) +* 解压缩 +* 运行 `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) - - 下载 [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) - - 在任意位置解压缩 - - 运行 `ZeroNet.exe` - -### macOS +### Linux 命令行 - - 下载 [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) - - 在任意位置解压缩 - - 运行 `ZeroNet.app` - -### Linux (x86-64bit) +* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` +* `tar xvpfz ZeroBundle-linux64.tar.gz` +* `cd ZeroBundle` +* 执行 `./ZeroNet.sh` 来启动 - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` - - `unzip ZeroNet-linux.zip` - - `cd ZeroNet-linux` - - 使用以下命令启动 `./ZeroNet.sh` - - 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面 - - __提示:__ 若要允许在 Web 界面上的远程连接,使用以下命令启动 `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` +在你打开时他将会自动下载最新版本的 ZeroNet 。 -### 从源代码安装 +#### 在 Debian Linux 中手动安装 - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip` - - `unzip ZeroNet-src.zip` - - `cd ZeroNet` - - `sudo apt-get update` - - `sudo apt-get install python3-pip` - - `sudo python3 -m pip install -r requirements.txt` - - 使用以下命令启动 `python3 zeronet.py` - - 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面 +* `sudo apt-get update` +* `sudo apt-get install msgpack-python python-gevent` +* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` +* `tar xvpfz master.tar.gz` +* `cd ZeroNet-master` +* 执行 `python2 zeronet.py` 来启动 +* 在你的浏览器中打开 http://127.0.0.1:43110/ - ### Android (arm, arm64, x86) - - minimum Android version supported 21 (Android 5.0 Lollipop) - - [Download from Google 
Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) - - APK download: https://github.com/canewsin/zeronet_mobile/releases +### [FreeBSD](https://www.freebsd.org/) -### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) - - minimum Android version supported 16 (JellyBean) - - [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) +* `pkg install zeronet` 或者 `cd /usr/ports/security/zeronet/ && make install clean` +* `sysrc zeronet_enable="YES"` +* `service zeronet start` +* 在你的浏览器中打开 http://127.0.0.1:43110/ + +### [Vagrant](https://www.vagrantup.com/) + +* `vagrant up` +* 通过 `vagrant ssh` 连接到 VM +* `cd /vagrant` +* 运行 `python2 zeronet.py --ui_ip 0.0.0.0` +* 在你的浏览器中打开 http://127.0.0.1:43110/ + +### [Docker](https://www.docker.com/) +* `docker run -d -v :/root/data -p 26552:26552 -p 43110:43110 nofish/zeronet` +* 这个 Docker 镜像包含了 Tor ,但默认是禁用的,因为一些托管商不允许你在他们的服务器上运行 Tor。如果你希望启用它, +设置 `ENABLE_TOR` 环境变量为 `true` (默认: `false`). E.g.: + + `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 26552:26552 -p 43110:43110 nofish/zeronet` +* 在你的浏览器中打开 http://127.0.0.1:43110/ + +### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/) + +* `virtualenv env` +* `source env/bin/activate` +* `pip install msgpack gevent` +* `python2 zeronet.py` +* 在你的浏览器中打开 http://127.0.0.1:43110/ ## 现有限制 -* 传输文件时没有压缩 +* ~~没有类似于 BitTorrent 的文件拆分来支持大文件~~ (已添加大文件支持) +* ~~没有比 BitTorrent 更好的匿名性~~ (已添加内置的完整 Tor 支持) +* 传输文件时没有压缩~~和加密~~ (已添加 TLS 支持) * 不支持私有站点 -## 如何创建一个 ZeroNet 站点? +## 如何创建一个 ZeroNet 站点? - * 点击 [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d) 站点的 **⋮** > **「新建空站点」** 菜单项 - * 您将被**重定向**到一个全新的站点,该站点只能由您修改 - * 您可以在 **data/[您的站点地址]** 目录中找到并修改网站的内容 - * 修改后打开您的网站,将右上角的「0」按钮拖到左侧,然后点击底部的**签名**并**发布**按钮 -接下来的步骤:[ZeroNet 开发者文档](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) +如果 zeronet 在运行,把它关掉 +执行: +```bash +$ zeronet.py siteCreate +... +- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq +- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +- Site created! +$ zeronet.py +... +``` + +你已经完成了! 现在任何人都可以通过 +`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` +来访问你的站点 + +下一步: [ZeroNet 开发者文档](https://zeronet.io/docs/site_development/getting_started/) + + +## 我要如何修改 ZeroNet 站点? + +* 修改位于 data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 的目录. + 在你改好之后: + +```bash +$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2... +Private key (input hidden): +``` + +* 输入你在创建站点时获得的私钥 + +```bash +$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +Site:13DNDk..bhC2 Publishing to 3/10 peers... +Site:13DNDk..bhC2 Successfuly published to 3 peers +- Serving files.... +``` + +* 就是这样! 你现在已经成功的签名并推送了你的更改。 + ## 帮助这个项目 -- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred) -- LiberaPay: https://liberapay.com/PramUkesh -- Paypal: https://paypal.me/PramUkesh -- Others: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) +- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX +- Paypal: https://zeronet.io/docs/help_zeronet/donate/ -#### 感谢您! 
+### 赞助商 -* 更多信息,帮助,变更记录和 zeronet 站点:https://www.reddit.com/r/zeronetx/ -* 前往 [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 或 [gitter](https://gitter.im/canewsin/ZeroNet) 和我们聊天 -* [这里](https://gitter.im/canewsin/ZeroNet)是一个 gitter 上的中文聊天室 -* Email: canews.in@gmail.com +* 在 OSX/Safari 下 [BrowserStack.com](https://www.browserstack.com) 带来更好的兼容性 + +#### 感谢! + +* 更多信息, 帮助, 变更记录和 zeronet 站点: https://www.reddit.com/r/zeronet/ +* 在: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 和我们聊天,或者使用 [gitter](https://gitter.im/HelloZeroNet/ZeroNet) +* [这里](https://gitter.im/ZeroNet-zh/Lobby)是一个 gitter 上的中文聊天室 +* Email: hello@noloop.me diff --git a/README.md b/README.md index 70b79adc..708116e3 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) - -Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.dev / [ZeroNet Site](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/), Unlike Bitcoin, ZeroNet Doesn't need a blockchain to run, But uses cryptography used by BTC, to ensure data integrity and validation. +__Warning: Development test version, do not use on live data__ + +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) + +Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.io ## Why? @@ -33,22 +35,22 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ * After starting `zeronet.py` you will be able to visit zeronet sites using `http://127.0.0.1:43110/{zeronet_address}` (eg. - `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). + `http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`). * When you visit a new zeronet site, it tries to find peers using the BitTorrent network so it can download the site files (html, css, js...) from them. * Each visited site is also served by you. * Every site contains a `content.json` file which holds all other files in a sha512 hash and a signature generated using the site's private key. * If the site owner (who has the private key for the site address) modifies the - site and signs the new `content.json` and publishes it to the peers. + site, then he/she signs the new `content.json` and publishes it to the peers. Afterwards, the peers verify the `content.json` integrity (using the signature), they download the modified files and publish the new content to other peers. 
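
The bullet points above describe the whole trust model: a visitor never trusts site files directly, only files whose sha512 digests match a `content.json` that carries a valid signature from the site's private key. As a rough illustration of the hashing side of that check, here is a minimal standard-library Python sketch; it is not ZeroNet's actual code, it assumes a manifest layout of `{"files": {"relative/path": {"sha512": "<hex>", "size": <bytes>}}}`, and it leaves out the Bitcoin-style ECDSA signature verification that ZeroNet performs on `content.json` itself.

```python
# Illustrative sketch only -- not ZeroNet's implementation.
# Assumed manifest layout: {"files": {"rel/path": {"sha512": "<hex>", "size": <int>}}}
# (the digest encoding in real content.json files may differ); the signature
# check on content.json itself is intentionally omitted here.
import hashlib
import json
from pathlib import Path

def verify_site_files(site_dir):
    site = Path(site_dir)
    manifest = json.loads((site / "content.json").read_text())
    ok = True
    for rel_path, entry in manifest.get("files", {}).items():
        data = (site / rel_path).read_bytes()
        if len(data) != entry["size"] or hashlib.sha512(data).hexdigest() != entry["sha512"]:
            print("integrity mismatch:", rel_path)
            ok = False
    return ok

# Hypothetical usage against a downloaded site directory:
# verify_site_files("data/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D")
```
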
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [Frequently asked questions »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) +#### [Frequently asked questions »](https://zeronet.io/docs/faq/) -#### [ZeroNet Developer Documentation »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) +#### [ZeroNet Developer Documentation »](https://zeronet.io/docs/site_development/getting_started/) ## Screenshots @@ -56,101 +58,116 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ ![Screenshot](https://i.imgur.com/H60OAHY.png) ![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -#### [More screenshots in ZeroNet docs »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) +#### [More screenshots in ZeroNet docs »](https://zeronet.io/docs/using_zeronet/sample_sites/) ## How to join -### Windows - - - Download [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) - - Unpack anywhere - - Run `ZeroNet.exe` - -### macOS - - - Download [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) - - Unpack anywhere - - Run `ZeroNet.app` - -### Linux (x86-64bit) - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` - - `unzip ZeroNet-linux.zip` - - `cd ZeroNet-linux` - - Start with: `./ZeroNet.sh` - - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/ - - __Tip:__ Start with `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` to allow remote connections on the web interface. - - ### Android (arm, arm64, x86) - - minimum Android version supported 21 (Android 5.0 Lollipop) - - [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) - - APK download: https://github.com/canewsin/zeronet_mobile/releases - -### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) - - minimum Android version supported 16 (JellyBean) - - [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) - - -#### Docker -There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/ - -### Online Proxies -Proxies are like seed boxes for sites(i.e ZNX runs on a cloud vps), you can try zeronet experience from proxies. Add your proxy below if you have one. 
- -#### Official ZNX Proxy : - -https://proxy.zeronet.dev/ - -https://zeronet.dev/ - -#### From Community - -https://0net-preview.com/ - -https://portal.ngnoid.tv/ - -https://zeronet.ipfsscan.io/ +### Install from package for your distribution +* Arch Linux: [zeronet](https://aur.archlinux.org/zeronet.git), [zeronet-git](https://aur.archlinux.org/zeronet-git.git) +* Gentoo: [emerge repository](https://github.com/leycec/raiagent) +* FreeBSD: zeronet +* Whonix: [instructions](https://www.whonix.org/wiki/ZeroNet) ### Install from source - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip` - - `unzip ZeroNet-src.zip` - - `cd ZeroNet` - - `sudo apt-get update` - - `sudo apt-get install python3-pip` - - `sudo python3 -m pip install -r requirements.txt` - - Start with: `python3 zeronet.py` - - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/ +Fetch and extract the source: + + wget https://github.com/HelloZeroNet/ZeroNet/archive/py3.tar.gz + tar xvpfz py3.tar.gz + cd ZeroNet-py3 + +Install Python module dependencies either: + +* (Option A) into a [virtual env](https://virtualenv.readthedocs.org/en/latest/) + + ``` + virtualenv zeronet + source zeronet/bin/activate + python -m pip install -r requirements.txt + ``` + +* (Option B) into the system (requires root), for example, on Debian/Ubuntu: + + ``` + sudo apt-get update + sudo apt-get install python3-pip + sudo python3 -m pip install -r requirements.txt + ``` + +Start Zeronet: + + python3 zeronet.py + +Open the ZeroHello landing page in your browser by navigating to: + + http://127.0.0.1:43110/ ## Current limitations -* File transactions are not compressed +* ~~No torrent-like file splitting for big file support~~ (big file support added) +* ~~No more anonymous than Bittorrent~~ (built-in full Tor support added) +* File transactions are not compressed ~~or encrypted yet~~ (TLS encryption added) * No private sites ## How can I create a ZeroNet site? - * Click on **⋮** > **"Create new, empty site"** menu item on the site [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d). - * You will be **redirected** to a completely new site that is only modifiable by you! - * You can find and modify your site's content in **data/[yoursiteaddress]** directory - * After the modifications open your site, drag the topright "0" button to left, then press **sign** and **publish** buttons on the bottom +Shut down zeronet if you are running it already + +```bash +$ zeronet.py siteCreate +... +- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq +- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +- Site created! +$ zeronet.py +... +``` + +Congratulations, you're finished! Now anyone can access your site using +`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` + +Next steps: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/) + + +## How can I modify a ZeroNet site? + +* Modify files located in data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory. + After you're finished: + +```bash +$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2... +Private key (input hidden): +``` + +* Enter the private key you got when you created the site, then: + +```bash +$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 +... +Site:13DNDk..bhC2 Publishing to 3/10 peers... +Site:13DNDk..bhC2 Successfuly published to 3 peers +- Serving files.... 
+``` + +* That's it! You've successfully signed and published your modifications. -Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## Help keep this project alive -- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred) -- LiberaPay: https://liberapay.com/PramUkesh -- Paypal: https://paypal.me/PramUkesh -- Others: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) + +- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX +- Paypal: https://zeronet.io/docs/help_zeronet/donate/ + +### Sponsors + +* Better macOS/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com) #### Thank you! -* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronetx/ -* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/canewsin/ZeroNet) -* Email: canews.in@gmail.com +* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronet/ +* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet) +* Email: hello@zeronet.io (PGP: CB9613AE) diff --git a/plugins b/plugins deleted file mode 160000 index 689d9309..00000000 --- a/plugins +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb diff --git a/plugins/AnnounceLocal/AnnounceLocalPlugin.py b/plugins/AnnounceLocal/AnnounceLocalPlugin.py new file mode 100644 index 00000000..0919762a --- /dev/null +++ b/plugins/AnnounceLocal/AnnounceLocalPlugin.py @@ -0,0 +1,148 @@ +import time + +import gevent + +from Plugin import PluginManager +from Config import config +from . 
import BroadcastServer + + +@PluginManager.registerTo("SiteAnnouncer") +class SiteAnnouncerPlugin(object): + def announce(self, force=False, *args, **kwargs): + local_announcer = self.site.connection_server.local_announcer + + thread = None + if local_announcer and (force or time.time() - local_announcer.last_discover > 5 * 60): + thread = gevent.spawn(local_announcer.discover, force=force) + back = super(SiteAnnouncerPlugin, self).announce(force=force, *args, **kwargs) + + if thread: + thread.join() + + return back + + +class LocalAnnouncer(BroadcastServer.BroadcastServer): + def __init__(self, server, listen_port): + super(LocalAnnouncer, self).__init__("zeronet", listen_port=listen_port) + self.server = server + + self.sender_info["peer_id"] = self.server.peer_id + self.sender_info["port"] = self.server.port + self.sender_info["broadcast_port"] = listen_port + self.sender_info["rev"] = config.rev + + self.known_peers = {} + self.last_discover = 0 + + def discover(self, force=False): + self.log.debug("Sending discover request (force: %s)" % force) + self.last_discover = time.time() + if force: # Probably new site added, clean cache + self.known_peers = {} + + for peer_id, known_peer in list(self.known_peers.items()): + if time.time() - known_peer["found"] > 20 * 60: + del(self.known_peers[peer_id]) + self.log.debug("Timeout, removing from known_peers: %s" % peer_id) + self.broadcast({"cmd": "discoverRequest", "params": {}}, port=self.listen_port) + + def actionDiscoverRequest(self, sender, params): + back = { + "cmd": "discoverResponse", + "params": { + "sites_changed": self.server.site_manager.sites_changed + } + } + + if sender["peer_id"] not in self.known_peers: + self.known_peers[sender["peer_id"]] = {"added": time.time(), "sites_changed": 0, "updated": 0, "found": time.time()} + self.log.debug("Got discover request from unknown peer %s (%s), time to refresh known peers" % (sender["ip"], sender["peer_id"])) + gevent.spawn_later(1.0, self.discover) # Let the response arrive first to the requester + + return back + + def actionDiscoverResponse(self, sender, params): + if sender["peer_id"] in self.known_peers: + self.known_peers[sender["peer_id"]]["found"] = time.time() + if params["sites_changed"] != self.known_peers.get(sender["peer_id"], {}).get("sites_changed"): + # Peer's site list changed, request the list of new sites + return {"cmd": "siteListRequest"} + else: + # Peer's site list is the same + for site in self.server.sites.values(): + peer = site.peers.get("%s:%s" % (sender["ip"], sender["port"])) + if peer: + peer.found("local") + + def actionSiteListRequest(self, sender, params): + back = [] + sites = list(self.server.sites.values()) + + # Split adresses to group of 100 to avoid UDP size limit + site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)] + for site_group in site_groups: + res = {} + res["sites_changed"] = self.server.site_manager.sites_changed + res["sites"] = [site.address_hash for site in site_group] + back.append({"cmd": "siteListResponse", "params": res}) + return back + + def actionSiteListResponse(self, sender, params): + s = time.time() + peer_sites = set(params["sites"]) + num_found = 0 + added_sites = [] + for site in self.server.sites.values(): + if site.address_hash in peer_sites: + added = site.addPeer(sender["ip"], sender["port"], source="local") + num_found += 1 + if added: + site.worker_manager.onPeers() + site.updateWebsocket(peers_added=1) + added_sites.append(site) + + # Save sites changed value to avoid unnecessary site list 
download + if sender["peer_id"] not in self.known_peers: + self.known_peers[sender["peer_id"]] = {"added": time.time()} + + self.known_peers[sender["peer_id"]]["sites_changed"] = params["sites_changed"] + self.known_peers[sender["peer_id"]]["updated"] = time.time() + self.known_peers[sender["peer_id"]]["found"] = time.time() + + self.log.debug( + "Tracker result: Discover from %s response parsed in %.3fs, found: %s added: %s of %s" % + (sender["ip"], time.time() - s, num_found, added_sites, len(peer_sites)) + ) + + +@PluginManager.registerTo("FileServer") +class FileServerPlugin(object): + def __init__(self, *args, **kwargs): + res = super(FileServerPlugin, self).__init__(*args, **kwargs) + if config.broadcast_port and config.tor != "always" and not config.disable_udp: + self.local_announcer = LocalAnnouncer(self, config.broadcast_port) + else: + self.local_announcer = None + return res + + def start(self, *args, **kwargs): + if self.local_announcer: + gevent.spawn(self.local_announcer.start) + return super(FileServerPlugin, self).start(*args, **kwargs) + + def stop(self): + if self.local_announcer: + self.local_announcer.stop() + res = super(FileServerPlugin, self).stop() + return res + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("AnnounceLocal plugin") + group.add_argument('--broadcast_port', help='UDP broadcasting port for local peer discovery', default=1544, type=int, metavar='port') + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/AnnounceLocal/BroadcastServer.py b/plugins/AnnounceLocal/BroadcastServer.py new file mode 100644 index 00000000..74678896 --- /dev/null +++ b/plugins/AnnounceLocal/BroadcastServer.py @@ -0,0 +1,139 @@ +import socket +import logging +import time +from contextlib import closing + +from Debug import Debug +from util import UpnpPunch +from util import Msgpack + + +class BroadcastServer(object): + def __init__(self, service_name, listen_port=1544, listen_ip=''): + self.log = logging.getLogger("BroadcastServer") + self.listen_port = listen_port + self.listen_ip = listen_ip + + self.running = False + self.sock = None + self.sender_info = {"service": service_name} + + def createBroadcastSocket(self): + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if hasattr(socket, 'SO_REUSEPORT'): + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except Exception as err: + self.log.warning("Error setting SO_REUSEPORT: %s" % err) + + binded = False + for retry in range(3): + try: + sock.bind((self.listen_ip, self.listen_port)) + binded = True + break + except Exception as err: + self.log.error( + "Socket bind to %s:%s error: %s, retry #%s" % + (self.listen_ip, self.listen_port, Debug.formatException(err), retry) + ) + time.sleep(retry) + + if binded: + return sock + else: + return False + + def start(self): # Listens for discover requests + self.sock = self.createBroadcastSocket() + if not self.sock: + self.log.error("Unable to listen on port %s" % self.listen_port) + return + + self.log.debug("Started on port %s" % self.listen_port) + + self.running = True + + while self.running: + try: + data, addr = self.sock.recvfrom(8192) + except Exception as err: + if self.running: + self.log.error("Listener receive error: %s" % err) + continue + + if not self.running: + break + + try: + message = 
Msgpack.unpack(data) + response_addr, message = self.handleMessage(addr, message) + if message: + self.send(response_addr, message) + except Exception as err: + self.log.error("Handlemessage error: %s" % Debug.formatException(err)) + self.log.debug("Stopped listening on port %s" % self.listen_port) + + def stop(self): + self.log.debug("Stopping, socket: %s" % self.sock) + self.running = False + if self.sock: + self.sock.close() + + def send(self, addr, message): + if type(message) is not list: + message = [message] + + for message_part in message: + message_part["sender"] = self.sender_info + + self.log.debug("Send to %s: %s" % (addr, message_part["cmd"])) + with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.sendto(Msgpack.pack(message_part), addr) + + def getMyIps(self): + return UpnpPunch._get_local_ips() + + def broadcast(self, message, port=None): + if not port: + port = self.listen_port + + my_ips = self.getMyIps() + addr = ("255.255.255.255", port) + + message["sender"] = self.sender_info + self.log.debug("Broadcast using ips %s on port %s: %s" % (my_ips, port, message["cmd"])) + + for my_ip in my_ips: + try: + with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + sock.bind((my_ip, 0)) + sock.sendto(Msgpack.pack(message), addr) + except Exception as err: + self.log.warning("Error sending broadcast using ip %s: %s" % (my_ip, err)) + + def handleMessage(self, addr, message): + self.log.debug("Got from %s: %s" % (addr, message["cmd"])) + cmd = message["cmd"] + params = message.get("params", {}) + sender = message["sender"] + sender["ip"] = addr[0] + + func_name = "action" + cmd[0].upper() + cmd[1:] + func = getattr(self, func_name, None) + + if sender["service"] != "zeronet" or sender["peer_id"] == self.sender_info["peer_id"]: + # Skip messages not for us or sent by us + message = None + elif func: + message = func(sender, params) + else: + self.log.debug("Unknown cmd: %s" % cmd) + message = None + + return (sender["ip"], sender["broadcast_port"]), message diff --git a/plugins/AnnounceLocal/Test/TestAnnounce.py b/plugins/AnnounceLocal/Test/TestAnnounce.py new file mode 100644 index 00000000..4def02ed --- /dev/null +++ b/plugins/AnnounceLocal/Test/TestAnnounce.py @@ -0,0 +1,113 @@ +import time +import copy + +import gevent +import pytest +import mock + +from AnnounceLocal import AnnounceLocalPlugin +from File import FileServer +from Test import Spy + +@pytest.fixture +def announcer(file_server, site): + file_server.sites[site.address] = site + announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server, listen_port=1100) + file_server.local_announcer = announcer + announcer.listen_port = 1100 + announcer.sender_info["broadcast_port"] = 1100 + announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"]) + announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically + gevent.spawn(announcer.start) + time.sleep(0.5) + + assert file_server.local_announcer.running + return file_server.local_announcer + +@pytest.fixture +def announcer_remote(request, site_temp): + file_server_remote = FileServer("127.0.0.1", 1545) + file_server_remote.sites[site_temp.address] = site_temp + announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server_remote, listen_port=1101) + file_server_remote.local_announcer = announcer + announcer.listen_port = 
1101 + announcer.sender_info["broadcast_port"] = 1101 + announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"]) + announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically + gevent.spawn(announcer.start) + time.sleep(0.5) + + assert file_server_remote.local_announcer.running + + def cleanup(): + file_server_remote.stop() + request.addfinalizer(cleanup) + + + return file_server_remote.local_announcer + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestAnnounce: + def testSenderInfo(self, announcer): + sender_info = announcer.sender_info + assert sender_info["port"] > 0 + assert len(sender_info["peer_id"]) == 20 + assert sender_info["rev"] > 0 + + def testIgnoreSelfMessages(self, announcer): + # No response to messages that has same peer_id as server + assert not announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": announcer.sender_info, "params": {}})[1] + + # Response to messages with different peer id + sender_info = copy.copy(announcer.sender_info) + sender_info["peer_id"] += "-" + addr, res = announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": sender_info, "params": {}}) + assert res["params"]["sites_changed"] > 0 + + def testDiscoverRequest(self, announcer, announcer_remote): + assert len(announcer_remote.known_peers) == 0 + with Spy.Spy(announcer_remote, "handleMessage") as responses: + announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port) + time.sleep(0.1) + + response_cmds = [response[1]["cmd"] for response in responses] + assert response_cmds == ["discoverResponse", "siteListResponse"] + assert len(responses[-1][1]["params"]["sites"]) == 1 + + # It should only request siteList if sites_changed value is different from last response + with Spy.Spy(announcer_remote, "handleMessage") as responses: + announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port) + time.sleep(0.1) + + response_cmds = [response[1]["cmd"] for response in responses] + assert response_cmds == ["discoverResponse"] + + def testPeerDiscover(self, announcer, announcer_remote, site): + assert announcer.server.peer_id != announcer_remote.server.peer_id + assert len(list(announcer.server.sites.values())[0].peers) == 0 + announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port) + time.sleep(0.1) + assert len(list(announcer.server.sites.values())[0].peers) == 1 + + def testRecentPeerList(self, announcer, announcer_remote, site): + assert len(site.peers_recent) == 0 + assert len(site.peers) == 0 + with Spy.Spy(announcer, "handleMessage") as responses: + announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port) + time.sleep(0.1) + assert [response[1]["cmd"] for response in responses] == ["discoverResponse", "siteListResponse"] + assert len(site.peers_recent) == 1 + assert len(site.peers) == 1 + + # It should update peer without siteListResponse + last_time_found = list(site.peers.values())[0].time_found + site.peers_recent.clear() + with Spy.Spy(announcer, "handleMessage") as responses: + announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port) + time.sleep(0.1) + assert [response[1]["cmd"] for response in responses] == ["discoverResponse"] + assert len(site.peers_recent) == 1 + assert list(site.peers.values())[0].time_found > last_time_found + + diff --git 
a/plugins/AnnounceLocal/Test/conftest.py b/plugins/AnnounceLocal/Test/conftest.py new file mode 100644 index 00000000..a88c642c --- /dev/null +++ b/plugins/AnnounceLocal/Test/conftest.py @@ -0,0 +1,4 @@ +from src.Test.conftest import * + +from Config import config +config.broadcast_port = 0 diff --git a/plugins/AnnounceLocal/Test/pytest.ini b/plugins/AnnounceLocal/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/AnnounceLocal/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/AnnounceLocal/__init__.py b/plugins/AnnounceLocal/__init__.py new file mode 100644 index 00000000..5b80abd2 --- /dev/null +++ b/plugins/AnnounceLocal/__init__.py @@ -0,0 +1 @@ +from . import AnnounceLocalPlugin \ No newline at end of file diff --git a/plugins/AnnounceShare/AnnounceSharePlugin.py b/plugins/AnnounceShare/AnnounceSharePlugin.py new file mode 100644 index 00000000..057ce55a --- /dev/null +++ b/plugins/AnnounceShare/AnnounceSharePlugin.py @@ -0,0 +1,190 @@ +import time +import os +import logging +import json +import atexit + +import gevent + +from Config import config +from Plugin import PluginManager +from util import helper + + +class TrackerStorage(object): + def __init__(self): + self.log = logging.getLogger("TrackerStorage") + self.file_path = "%s/trackers.json" % config.data_dir + self.load() + self.time_discover = 0.0 + atexit.register(self.save) + + def getDefaultFile(self): + return {"shared": {}} + + def onTrackerFound(self, tracker_address, type="shared", my=False): + if not tracker_address.startswith("zero://"): + return False + + trackers = self.getTrackers() + added = False + if tracker_address not in trackers: + trackers[tracker_address] = { + "time_added": time.time(), + "time_success": 0, + "latency": 99.0, + "num_error": 0, + "my": False + } + self.log.debug("New tracker found: %s" % tracker_address) + added = True + + trackers[tracker_address]["time_found"] = time.time() + trackers[tracker_address]["my"] = my + return added + + def onTrackerSuccess(self, tracker_address, latency): + trackers = self.getTrackers() + if tracker_address not in trackers: + return False + + trackers[tracker_address]["latency"] = latency + trackers[tracker_address]["time_success"] = time.time() + trackers[tracker_address]["num_error"] = 0 + + def onTrackerError(self, tracker_address): + trackers = self.getTrackers() + if tracker_address not in trackers: + return False + + trackers[tracker_address]["time_error"] = time.time() + trackers[tracker_address]["num_error"] += 1 + + if len(self.getWorkingTrackers()) >= config.working_shared_trackers_limit: + error_limit = 5 + else: + error_limit = 30 + error_limit + + if trackers[tracker_address]["num_error"] > error_limit and trackers[tracker_address]["time_success"] < time.time() - 60 * 60: + self.log.debug("Tracker %s looks down, removing." 
% tracker_address) + del trackers[tracker_address] + + def getTrackers(self, type="shared"): + return self.file_content.setdefault(type, {}) + + def getWorkingTrackers(self, type="shared"): + trackers = { + key: tracker for key, tracker in self.getTrackers(type).items() + if tracker["time_success"] > time.time() - 60 * 60 + } + return trackers + + def getFileContent(self): + if not os.path.isfile(self.file_path): + open(self.file_path, "w").write("{}") + return self.getDefaultFile() + try: + return json.load(open(self.file_path)) + except Exception as err: + self.log.error("Error loading trackers list: %s" % err) + return self.getDefaultFile() + + def load(self): + self.file_content = self.getFileContent() + + trackers = self.getTrackers() + self.log.debug("Loaded %s shared trackers" % len(trackers)) + for address, tracker in list(trackers.items()): + tracker["num_error"] = 0 + if not address.startswith("zero://"): + del trackers[address] + + def save(self): + s = time.time() + helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True).encode("utf8")) + self.log.debug("Saved in %.3fs" % (time.time() - s)) + + def discoverTrackers(self, peers): + if len(self.getWorkingTrackers()) > config.working_shared_trackers_limit: + return False + s = time.time() + num_success = 0 + for peer in peers: + if peer.connection and peer.connection.handshake.get("rev", 0) < 3560: + continue # Not supported + + res = peer.request("getTrackers") + if not res or "error" in res: + continue + + num_success += 1 + for tracker_address in res["trackers"]: + if type(tracker_address) is bytes: # Backward compatibilitys + tracker_address = tracker_address.decode("utf8") + added = self.onTrackerFound(tracker_address) + if added: # Only add one tracker from one source + break + + if not num_success and len(peers) < 20: + self.time_discover = 0.0 + + if num_success: + self.save() + + self.log.debug("Trackers discovered from %s/%s peers in %.3fs" % (num_success, len(peers), time.time() - s)) + + +if "tracker_storage" not in locals(): + tracker_storage = TrackerStorage() + + +@PluginManager.registerTo("SiteAnnouncer") +class SiteAnnouncerPlugin(object): + def getTrackers(self): + if tracker_storage.time_discover < time.time() - 5 * 60: + tracker_storage.time_discover = time.time() + gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers()) + trackers = super(SiteAnnouncerPlugin, self).getTrackers() + shared_trackers = list(tracker_storage.getTrackers("shared").keys()) + if shared_trackers: + return trackers + shared_trackers + else: + return trackers + + def announceTracker(self, tracker, *args, **kwargs): + res = super(SiteAnnouncerPlugin, self).announceTracker(tracker, *args, **kwargs) + if res: + latency = res + tracker_storage.onTrackerSuccess(tracker, latency) + elif res is False: + tracker_storage.onTrackerError(tracker) + + return res + + +@PluginManager.registerTo("FileRequest") +class FileRequestPlugin(object): + def actionGetTrackers(self, params): + shared_trackers = list(tracker_storage.getWorkingTrackers("shared").keys()) + self.response({"trackers": shared_trackers}) + + +@PluginManager.registerTo("FileServer") +class FileServerPlugin(object): + def portCheck(self, *args, **kwargs): + res = super(FileServerPlugin, self).portCheck(*args, **kwargs) + if res and not config.tor == "always" and "Bootstrapper" in PluginManager.plugin_manager.plugin_names: + for ip in self.ip_external_list: + my_tracker_address = "zero://%s:%s" % (ip, config.fileserver_port) + 
tracker_storage.onTrackerFound(my_tracker_address, my=True) + return res + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("AnnounceShare plugin") + group.add_argument('--working_shared_trackers_limit', help='Stop discovering new shared trackers after this number of shared trackers is reached', default=5, type=int, metavar='limit') + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/AnnounceShare/Test/TestAnnounceShare.py b/plugins/AnnounceShare/Test/TestAnnounceShare.py new file mode 100644 index 00000000..7178eac8 --- /dev/null +++ b/plugins/AnnounceShare/Test/TestAnnounceShare.py @@ -0,0 +1,24 @@ +import pytest + +from AnnounceShare import AnnounceSharePlugin +from Peer import Peer +from Config import config + + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestAnnounceShare: + def testAnnounceList(self, file_server): + open("%s/trackers.json" % config.data_dir, "w").write("{}") + tracker_storage = AnnounceSharePlugin.tracker_storage + tracker_storage.load() + peer = Peer(file_server.ip, 1544, connection_server=file_server) + assert peer.request("getTrackers")["trackers"] == [] + + tracker_storage.onTrackerFound("zero://%s:15441" % file_server.ip) + assert peer.request("getTrackers")["trackers"] == [] + + # It needs to have at least one successful announce to be shared to other peers + tracker_storage.onTrackerSuccess("zero://%s:15441" % file_server.ip, 1.0) + assert peer.request("getTrackers")["trackers"] == ["zero://%s:15441" % file_server.ip] + diff --git a/plugins/AnnounceShare/Test/conftest.py b/plugins/AnnounceShare/Test/conftest.py new file mode 100644 index 00000000..5abd4dd6 --- /dev/null +++ b/plugins/AnnounceShare/Test/conftest.py @@ -0,0 +1,3 @@ +from src.Test.conftest import * + +from Config import config diff --git a/plugins/AnnounceShare/Test/pytest.ini b/plugins/AnnounceShare/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/AnnounceShare/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/AnnounceShare/__init__.py b/plugins/AnnounceShare/__init__.py new file mode 100644 index 00000000..dc1e40bd --- /dev/null +++ b/plugins/AnnounceShare/__init__.py @@ -0,0 +1 @@ +from .
import AnnounceSharePlugin diff --git a/plugins/AnnounceZero/AnnounceZeroPlugin.py b/plugins/AnnounceZero/AnnounceZeroPlugin.py new file mode 100644 index 00000000..dcaa04f0 --- /dev/null +++ b/plugins/AnnounceZero/AnnounceZeroPlugin.py @@ -0,0 +1,138 @@ +import time +import itertools + +from Plugin import PluginManager +from util import helper +from Crypt import CryptRsa + +allow_reload = False # No source reload supported in this plugin +time_full_announced = {} # Tracker address: Last announced all site to tracker +connection_pool = {} # Tracker address: Peer object + + +# We can only import plugin host clases after the plugins are loaded +@PluginManager.afterLoad +def importHostClasses(): + global Peer, AnnounceError + from Peer import Peer + from Site.SiteAnnouncer import AnnounceError + + +# Process result got back from tracker +def processPeerRes(tracker_address, site, peers): + added = 0 + # Ip4 + found_ipv4 = 0 + peers_normal = itertools.chain(peers.get("ip4", []), peers.get("ipv4", []), peers.get("ipv6", [])) + for packed_address in peers_normal: + found_ipv4 += 1 + peer_ip, peer_port = helper.unpackAddress(packed_address) + if site.addPeer(peer_ip, peer_port, source="tracker"): + added += 1 + # Onion + found_onion = 0 + for packed_address in peers["onion"]: + found_onion += 1 + peer_onion, peer_port = helper.unpackOnionAddress(packed_address) + if site.addPeer(peer_onion, peer_port, source="tracker"): + added += 1 + + if added: + site.worker_manager.onPeers() + site.updateWebsocket(peers_added=added) + return added + + +@PluginManager.registerTo("SiteAnnouncer") +class SiteAnnouncerPlugin(object): + def getTrackerHandler(self, protocol): + if protocol == "zero": + return self.announceTrackerZero + else: + return super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol) + + def announceTrackerZero(self, tracker_address, mode="start", num_want=10): + global time_full_announced + s = time.time() + + need_types = ["ip4"] # ip4 for backward compatibility reasons + need_types += self.site.connection_server.supported_ip_types + if self.site.connection_server.tor_manager.enabled: + need_types.append("onion") + + if mode == "start" or mode == "more": # Single: Announce only this site + sites = [self.site] + full_announce = False + else: # Multi: Announce all currently serving site + full_announce = True + if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 15: # No reannounce all sites within short time + return None + time_full_announced[tracker_address] = time.time() + from Site import SiteManager + sites = [site for site in SiteManager.site_manager.sites.values() if site.isServing()] + + # Create request + add_types = self.getOpenedServiceTypes() + request = { + "hashes": [], "onions": [], "port": self.fileserver_port, "need_types": need_types, "need_num": 20, "add": add_types + } + for site in sites: + if "onion" in add_types: + onion = self.site.connection_server.tor_manager.getOnion(site.address) + request["onions"].append(onion) + request["hashes"].append(site.address_hash) + + # Tracker can remove sites that we don't announce + if full_announce: + request["delete"] = True + + # Sent request to tracker + tracker_peer = connection_pool.get(tracker_address) # Re-use tracker connection if possible + if not tracker_peer: + tracker_ip, tracker_port = tracker_address.rsplit(":", 1) + tracker_peer = Peer(str(tracker_ip), int(tracker_port), connection_server=self.site.connection_server) + tracker_peer.is_tracker_connection = True + connection_pool[tracker_address] = 
tracker_peer + + res = tracker_peer.request("announce", request) + + if not res or "peers" not in res: + if full_announce: + time_full_announced[tracker_address] = 0 + raise AnnounceError("Invalid response: %s" % res) + + # Add peers from response to site + site_index = 0 + peers_added = 0 + for site_res in res["peers"]: + site = sites[site_index] + peers_added += processPeerRes(tracker_address, site, site_res) + site_index += 1 + + # Check if we need to sign prove the onion addresses + if "onion_sign_this" in res: + self.site.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites))) + request["onion_signs"] = {} + request["onion_sign_this"] = res["onion_sign_this"] + request["need_num"] = 0 + for site in sites: + onion = self.site.connection_server.tor_manager.getOnion(site.address) + publickey = self.site.connection_server.tor_manager.getPublickey(onion) + if publickey not in request["onion_signs"]: + sign = CryptRsa.sign(res["onion_sign_this"].encode("utf8"), self.site.connection_server.tor_manager.getPrivatekey(onion)) + request["onion_signs"][publickey] = sign + res = tracker_peer.request("announce", request) + if not res or "onion_sign_this" in res: + if full_announce: + time_full_announced[tracker_address] = 0 + raise AnnounceError("Announce onion address to failed: %s" % res) + + if full_announce: + tracker_peer.remove() # Close connection, we don't need it in next 5 minute + + self.site.log.debug( + "Tracker announce result: zero://%s (sites: %s, new peers: %s, add: %s) in %.3fs" % + (tracker_address, site_index, peers_added, add_types, time.time() - s) + ) + + return True diff --git a/plugins/AnnounceZero/__init__.py b/plugins/AnnounceZero/__init__.py new file mode 100644 index 00000000..8aec5ddb --- /dev/null +++ b/plugins/AnnounceZero/__init__.py @@ -0,0 +1 @@ +from . 
import AnnounceZeroPlugin \ No newline at end of file diff --git a/plugins/Bigfile/BigfilePiecefield.py b/plugins/Bigfile/BigfilePiecefield.py new file mode 100644 index 00000000..ee770573 --- /dev/null +++ b/plugins/Bigfile/BigfilePiecefield.py @@ -0,0 +1,164 @@ +import array + + +def packPiecefield(data): + assert isinstance(data, bytes) or isinstance(data, bytearray) + res = [] + if not data: + return array.array("H", b"") + + if data[0] == b"\x00": + res.append(0) + find = b"\x01" + else: + find = b"\x00" + last_pos = 0 + pos = 0 + while 1: + pos = data.find(find, pos) + if find == b"\x00": + find = b"\x01" + else: + find = b"\x00" + if pos == -1: + res.append(len(data) - last_pos) + break + res.append(pos - last_pos) + last_pos = pos + return array.array("H", res) + + +def unpackPiecefield(data): + if not data: + return b"" + + res = [] + char = b"\x01" + for times in data: + if times > 10000: + return b"" + res.append(char * times) + if char == b"\x01": + char = b"\x00" + else: + char = b"\x01" + return b"".join(res) + + +def spliceBit(data, idx, bit): + assert bit == b"\x00" or bit == b"\x01" + if len(data) < idx: + data = data.ljust(idx + 1, b"\x00") + return data[:idx] + bit + data[idx+ 1:] + +class Piecefield(object): + def tostring(self): + return "".join(["1" if b else "0" for b in self.tobytes()]) + + +class BigfilePiecefield(Piecefield): + __slots__ = ["data"] + + def __init__(self): + self.data = b"" + + def frombytes(self, s): + assert isinstance(s, bytes) or isinstance(s, bytearray) + self.data = s + + def tobytes(self): + return self.data + + def pack(self): + return packPiecefield(self.data).tobytes() + + def unpack(self, s): + self.data = unpackPiecefield(array.array("H", s)) + + def __getitem__(self, key): + try: + return self.data[key] + except IndexError: + return False + + def __setitem__(self, key, value): + self.data = spliceBit(self.data, key, value) + +class BigfilePiecefieldPacked(Piecefield): + __slots__ = ["data"] + + def __init__(self): + self.data = b"" + + def frombytes(self, data): + assert isinstance(data, bytes) or isinstance(data, bytearray) + self.data = packPiecefield(data).tobytes() + + def tobytes(self): + return unpackPiecefield(array.array("H", self.data)) + + def pack(self): + return array.array("H", self.data).tobytes() + + def unpack(self, data): + self.data = data + + def __getitem__(self, key): + try: + return self.tobytes()[key] + except IndexError: + return False + + def __setitem__(self, key, value): + data = spliceBit(self.tobytes(), key, value) + self.frombytes(data) + + +if __name__ == "__main__": + import os + import psutil + import time + testdata = b"\x01" * 100 + b"\x00" * 900 + b"\x01" * 4000 + b"\x00" * 4999 + b"\x01" + meminfo = psutil.Process(os.getpid()).memory_info + + for storage in [BigfilePiecefieldPacked, BigfilePiecefield]: + print("-- Testing storage: %s --" % storage) + m = meminfo()[0] + s = time.time() + piecefields = {} + for i in range(10000): + piecefield = storage() + piecefield.frombytes(testdata[:i] + b"\x00" + testdata[i + 1:]) + piecefields[i] = piecefield + + print("Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))) + + m = meminfo()[0] + s = time.time() + for piecefield in list(piecefields.values()): + val = piecefield[1000] + + print("Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)) + + m = meminfo()[0] + s = time.time() + for piecefield in list(piecefields.values()): + piecefield[1000] = b"\x01" + + 
print("Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s)) + + m = meminfo()[0] + s = time.time() + for piecefield in list(piecefields.values()): + packed = piecefield.pack() + + print("Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed))) + + m = meminfo()[0] + s = time.time() + for piecefield in list(piecefields.values()): + piecefield.unpack(packed) + + print("Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data))) + + piecefields = {} diff --git a/plugins/Bigfile/BigfilePlugin.py b/plugins/Bigfile/BigfilePlugin.py new file mode 100644 index 00000000..03a0f44f --- /dev/null +++ b/plugins/Bigfile/BigfilePlugin.py @@ -0,0 +1,784 @@ +import time +import os +import subprocess +import shutil +import collections +import math +import warnings +import base64 +import binascii +import json + +import gevent +import gevent.lock + +from Plugin import PluginManager +from Debug import Debug +from Crypt import CryptHash +with warnings.catch_warnings(): + warnings.filterwarnings("ignore") # Ignore missing sha3 warning + import merkletools + +from util import helper +from util import Msgpack +import util +from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked + + +# We can only import plugin host clases after the plugins are loaded +@PluginManager.afterLoad +def importPluginnedClasses(): + global VerifyError, config + from Content.ContentManager import VerifyError + from Config import config + +if "upload_nonces" not in locals(): + upload_nonces = {} + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def isCorsAllowed(self, path): + if path == "/ZeroNet-Internal/BigfileUpload": + return True + else: + return super(UiRequestPlugin, self).isCorsAllowed(path) + + @helper.encodeResponse + def actionBigfileUpload(self): + nonce = self.get.get("upload_nonce") + if nonce not in upload_nonces: + return self.error403("Upload nonce error.") + + upload_info = upload_nonces[nonce] + del upload_nonces[nonce] + + self.sendHeader(200, "text/html", noscript=True, extra_headers={ + "Access-Control-Allow-Origin": "null", + "Access-Control-Allow-Credentials": "true" + }) + + self.readMultipartHeaders(self.env['wsgi.input']) # Skip http headers + + site = upload_info["site"] + inner_path = upload_info["inner_path"] + + with site.storage.open(inner_path, "wb", create_dirs=True) as out_file: + merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile( + self.env['wsgi.input'], upload_info["size"], upload_info["piece_size"], out_file + ) + + if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split + hash = binascii.hexlify(piecemap_info["sha512_pieces"][0]) + hash_id = site.content_manager.hashfield.getHashId(hash) + site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True) + + else: # Big file + file_name = helper.getFilename(inner_path) + site.storage.open(upload_info["piecemap"], "wb").write(Msgpack.pack({file_name: piecemap_info})) + + # Find piecemap and file relative path to content.json + file_info = site.content_manager.getFileInfo(inner_path, new_file=True) + content_inner_path_dir = helper.getDirname(file_info["content_inner_path"]) + piecemap_relative_path = upload_info["piecemap"][len(content_inner_path_dir):] + file_relative_path = inner_path[len(content_inner_path_dir):] + + # Add file to content.json + if site.storage.isFile(file_info["content_inner_path"]): + content = 
site.storage.loadJson(file_info["content_inner_path"]) + else: + content = {} + if "files_optional" not in content: + content["files_optional"] = {} + + content["files_optional"][file_relative_path] = { + "sha512": merkle_root, + "size": upload_info["size"], + "piecemap": piecemap_relative_path, + "piece_size": piece_size + } + + merkle_root_hash_id = site.content_manager.hashfield.getHashId(merkle_root) + site.content_manager.optionalDownloaded(inner_path, merkle_root_hash_id, upload_info["size"], own=True) + site.storage.writeJson(file_info["content_inner_path"], content) + + site.content_manager.contents.loadItem(file_info["content_inner_path"]) # reload cache + + return json.dumps({ + "merkle_root": merkle_root, + "piece_num": len(piecemap_info["sha512_pieces"]), + "piece_size": piece_size, + "inner_path": inner_path + }) + + def readMultipartHeaders(self, wsgi_input): + found = False + for i in range(100): + line = wsgi_input.readline() + if line == b"\r\n": + found = True + break + if not found: + raise Exception("No multipart header found") + return i + + def actionFile(self, file_path, *args, **kwargs): + if kwargs.get("file_size", 0) > 1024 * 1024 and kwargs.get("path_parts"): # Only check files larger than 1MB + path_parts = kwargs["path_parts"] + site = self.server.site_manager.get(path_parts["address"]) + big_file = site.storage.openBigfile(path_parts["inner_path"], prebuffer=2 * 1024 * 1024) + if big_file: + kwargs["file_obj"] = big_file + kwargs["file_size"] = big_file.size + + return super(UiRequestPlugin, self).actionFile(file_path, *args, **kwargs) + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def actionBigfileUploadInit(self, to, inner_path, size): + valid_signers = self.site.content_manager.getValidSigners(inner_path) + auth_address = self.user.getAuthAddress(self.site.address) + if not self.site.settings["own"] and auth_address not in valid_signers: + self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers)) + return self.response(to, {"error": "Forbidden, you can only modify your own files"}) + + nonce = CryptHash.random() + piece_size = 1024 * 1024 + inner_path = self.site.content_manager.sanitizePath(inner_path) + file_info = self.site.content_manager.getFileInfo(inner_path, new_file=True) + + content_inner_path_dir = helper.getDirname(file_info["content_inner_path"]) + file_relative_path = inner_path[len(content_inner_path_dir):] + + upload_nonces[nonce] = { + "added": time.time(), + "site": self.site, + "inner_path": inner_path, + "websocket_client": self, + "size": size, + "piece_size": piece_size, + "piecemap": inner_path + ".piecemap.msgpack" + } + return { + "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce, + "piece_size": piece_size, + "inner_path": inner_path, + "file_relative_path": file_relative_path + } + + def actionSiteSetAutodownloadBigfileLimit(self, to, limit): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.settings["autodownload_bigfile_size_limit"] = int(limit) + self.response(to, "ok") + + def actionFileDelete(self, to, inner_path): + piecemap_inner_path = inner_path + ".piecemap.msgpack" + if self.hasFilePermission(inner_path) and self.site.storage.isFile(piecemap_inner_path): + # Also delete .piecemap.msgpack file if exists + self.log.debug("Deleting piecemap: %s" % piecemap_inner_path) + file_info = 
self.site.content_manager.getFileInfo(piecemap_inner_path) + if file_info: + content_json = self.site.storage.loadJson(file_info["content_inner_path"]) + relative_path = file_info["relative_path"] + if relative_path in content_json.get("files_optional", {}): + del content_json["files_optional"][relative_path] + self.site.storage.writeJson(file_info["content_inner_path"], content_json) + self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) + try: + self.site.storage.delete(piecemap_inner_path) + except Exception as err: + self.log.error("File %s delete error: %s" % (piecemap_inner_path, err)) + + return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path) + + +@PluginManager.registerTo("ContentManager") +class ContentManagerPlugin(object): + def getFileInfo(self, inner_path, *args, **kwargs): + if "|" not in inner_path: + return super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs) + + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + file_info = super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs) + return file_info + + def readFile(self, file_in, size, buff_size=1024 * 64): + part_num = 0 + recv_left = size + + while 1: + part_num += 1 + read_size = min(buff_size, recv_left) + part = file_in.read(read_size) + + if not part: + break + yield part + + if part_num % 100 == 0: # Avoid blocking ZeroNet execution during upload + time.sleep(0.001) + + recv_left -= read_size + if recv_left <= 0: + break + + def hashBigfile(self, file_in, size, piece_size=1024 * 1024, file_out=None): + self.site.settings["has_bigfile"] = True + + recv = 0 + try: + piece_hash = CryptHash.sha512t() + piece_hashes = [] + piece_recv = 0 + + mt = merkletools.MerkleTools() + mt.hash_function = CryptHash.sha512t + + part = "" + for part in self.readFile(file_in, size): + if file_out: + file_out.write(part) + + recv += len(part) + piece_recv += len(part) + piece_hash.update(part) + if piece_recv >= piece_size: + piece_digest = piece_hash.digest() + piece_hashes.append(piece_digest) + mt.leaves.append(piece_digest) + piece_hash = CryptHash.sha512t() + piece_recv = 0 + + if len(piece_hashes) % 100 == 0 or recv == size: + self.log.info("- [HASHING:%.0f%%] Pieces: %s, %.1fMB/%.1fMB" % ( + float(recv) / size * 100, len(piece_hashes), recv / 1024 / 1024, size / 1024 / 1024 + )) + part = "" + if len(part) > 0: + piece_digest = piece_hash.digest() + piece_hashes.append(piece_digest) + mt.leaves.append(piece_digest) + except Exception as err: + raise err + finally: + if file_out: + file_out.close() + + mt.make_tree() + merkle_root = mt.get_merkle_root() + if type(merkle_root) is bytes: # Python <3.5 + merkle_root = merkle_root.decode() + return merkle_root, piece_size, { + "sha512_pieces": piece_hashes + } + + def hashFile(self, dir_inner_path, file_relative_path, optional=False): + inner_path = dir_inner_path + file_relative_path + + file_size = self.site.storage.getSize(inner_path) + # Only care about optional files >1MB + if not optional or file_size < 1 * 1024 * 1024: + return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional) + + back = {} + content = self.contents.get(dir_inner_path + "content.json") + + hash = None + piecemap_relative_path = None + piece_size = None + + # Don't re-hash if it's already in content.json + if content and file_relative_path in content.get("files_optional", {}): + file_node = 
content["files_optional"][file_relative_path] + if file_node["size"] == file_size: + self.log.info("- [SAME SIZE] %s" % file_relative_path) + hash = file_node.get("sha512") + piecemap_relative_path = file_node.get("piecemap") + piece_size = file_node.get("piece_size") + + if not hash or not piecemap_relative_path: # Not in content.json yet + if file_size < 5 * 1024 * 1024: # Don't create piecemap automatically for files smaller than 5MB + return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional) + + self.log.info("- [HASHING] %s" % file_relative_path) + merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb"), file_size) + if not hash: + hash = merkle_root + + if not piecemap_relative_path: + file_name = helper.getFilename(file_relative_path) + piecemap_relative_path = file_relative_path + ".piecemap.msgpack" + piecemap_inner_path = inner_path + ".piecemap.msgpack" + + self.site.storage.open(piecemap_inner_path, "wb").write(Msgpack.pack({file_name: piecemap_info})) + + back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, optional=True)) + + piece_num = int(math.ceil(float(file_size) / piece_size)) + + # Add the merkle root to hashfield + hash_id = self.site.content_manager.hashfield.getHashId(hash) + self.optionalDownloaded(inner_path, hash_id, file_size, own=True) + self.site.storage.piecefields[hash].frombytes(b"\x01" * piece_num) + + back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size} + return back + + def getPiecemap(self, inner_path): + file_info = self.site.content_manager.getFileInfo(inner_path) + piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"] + self.site.needFile(piecemap_inner_path, priority=20) + piecemap = Msgpack.unpack(self.site.storage.open(piecemap_inner_path, "rb").read())[helper.getFilename(inner_path)] + piecemap["piece_size"] = file_info["piece_size"] + return piecemap + + def verifyPiece(self, inner_path, pos, piece): + piecemap = self.getPiecemap(inner_path) + piece_i = int(pos / piecemap["piece_size"]) + if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]: + raise VerifyError("Invalid hash") + return True + + def verifyFile(self, inner_path, file, ignore_same=True): + if "|" not in inner_path: + return super(ContentManagerPlugin, self).verifyFile(inner_path, file, ignore_same) + + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + + return self.verifyPiece(inner_path, pos_from, file) + + def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): + if "|" in inner_path: + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + file_info = self.getFileInfo(inner_path) + + # Mark piece downloaded + piece_i = int(pos_from / file_info["piece_size"]) + self.site.storage.piecefields[file_info["sha512"]][piece_i] = b"\x01" + + # Only add to site size on first request + if hash_id in self.hashfield: + size = 0 + elif size > 1024 * 1024: + file_info = self.getFileInfo(inner_path) + if file_info and "sha512" in file_info: # We already have the file, but not in piecefield + sha512 = file_info["sha512"] + if sha512 not in self.site.storage.piecefields: + self.site.storage.checkBigfile(inner_path) + + return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own) + + def 
optionalRemoved(self, inner_path, hash_id, size=None): + if size and size > 1024 * 1024: + file_info = self.getFileInfo(inner_path) + sha512 = file_info["sha512"] + if sha512 in self.site.storage.piecefields: + del self.site.storage.piecefields[sha512] + + # Also remove other pieces of the file from download queue + for key in list(self.site.bad_files.keys()): + if key.startswith(inner_path + "|"): + del self.site.bad_files[key] + self.site.worker_manager.removeSolvedFileTasks() + return super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size) + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + def __init__(self, *args, **kwargs): + super(SiteStoragePlugin, self).__init__(*args, **kwargs) + self.piecefields = collections.defaultdict(BigfilePiecefield) + if "piecefields" in self.site.settings.get("cache", {}): + for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").items(): + if piecefield_packed: + self.piecefields[sha512].unpack(base64.b64decode(piecefield_packed)) + self.site.settings["cache"]["piecefields"] = {} + + def createSparseFile(self, inner_path, size, sha512=None): + file_path = self.getPath(inner_path) + + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) + + f = open(file_path, 'wb') + f.truncate(min(1024 * 1024 * 5, size)) # Only pre-allocate up to 5MB + f.close() + if os.name == "nt": + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + subprocess.call(["fsutil", "sparse", "setflag", file_path], close_fds=True, startupinfo=startupinfo) + + if sha512 and sha512 in self.piecefields: + self.log.debug("%s: File not exists, but has piecefield. Deleting piecefield." % inner_path) + del self.piecefields[sha512] + + def write(self, inner_path, content): + if "|" not in inner_path: + return super(SiteStoragePlugin, self).write(inner_path, content) + + # Write to specific position by passing |{pos} after the filename + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + file_path = self.getPath(inner_path) + + # Create dir if not exist + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) + + if not os.path.isfile(file_path): + file_info = self.site.content_manager.getFileInfo(inner_path) + self.createSparseFile(inner_path, file_info["size"]) + + # Write file + with open(file_path, "rb+") as file: + file.seek(pos_from) + if hasattr(content, 'read'): # File-like object + shutil.copyfileobj(content, file) # Write buff to disk + else: # Simple string + file.write(content) + del content + self.onUpdated(inner_path) + + def checkBigfile(self, inner_path): + file_info = self.site.content_manager.getFileInfo(inner_path) + if not file_info or (file_info and "piecemap" not in file_info): # It's not a big file + return False + + self.site.settings["has_bigfile"] = True + file_path = self.getPath(inner_path) + sha512 = file_info["sha512"] + piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"])) + if os.path.isfile(file_path): + if sha512 not in self.piecefields: + if open(file_path, "rb").read(128) == b"\0" * 128: + piece_data = b"\x00" + else: + piece_data = b"\x01" + self.log.debug("%s: File exists, but not in piecefield. Filling piecefiled with %s * %s." 
% (inner_path, piece_num, piece_data)) + self.piecefields[sha512].frombytes(piece_data * piece_num) + else: + self.log.debug("Creating bigfile: %s" % inner_path) + self.createSparseFile(inner_path, file_info["size"], sha512) + self.piecefields[sha512].frombytes(b"\x00" * piece_num) + self.log.debug("Created bigfile: %s" % inner_path) + return True + + def openBigfile(self, inner_path, prebuffer=0): + if not self.checkBigfile(inner_path): + return False + self.site.needFile(inner_path, blocking=False) # Download piecemap + return BigFile(self.site, inner_path, prebuffer=prebuffer) + + +class BigFile(object): + def __init__(self, site, inner_path, prebuffer=0): + self.site = site + self.inner_path = inner_path + file_path = site.storage.getPath(inner_path) + file_info = self.site.content_manager.getFileInfo(inner_path) + self.piece_size = file_info["piece_size"] + self.sha512 = file_info["sha512"] + self.size = file_info["size"] + self.prebuffer = prebuffer + self.read_bytes = 0 + + self.piecefield = self.site.storage.piecefields[self.sha512] + self.f = open(file_path, "rb+") + self.read_lock = gevent.lock.Semaphore() + + def read(self, buff=64 * 1024): + with self.read_lock: + pos = self.f.tell() + read_until = min(self.size, pos + buff) + requests = [] + # Request all required blocks + while 1: + piece_i = int(pos / self.piece_size) + if piece_i * self.piece_size >= read_until: + break + pos_from = piece_i * self.piece_size + pos_to = pos_from + self.piece_size + if not self.piecefield[piece_i]: + requests.append(self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=10)) + pos += self.piece_size + + if not all(requests): + return None + + # Request prebuffer + if self.prebuffer: + prebuffer_until = min(self.size, read_until + self.prebuffer) + priority = 3 + while 1: + piece_i = int(pos / self.piece_size) + if piece_i * self.piece_size >= prebuffer_until: + break + pos_from = piece_i * self.piece_size + pos_to = pos_from + self.piece_size + if not self.piecefield[piece_i]: + self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=max(0, priority)) + priority -= 1 + pos += self.piece_size + + gevent.joinall(requests) + self.read_bytes += buff + + # Increase buffer for long reads + if self.read_bytes > 7 * 1024 * 1024 and self.prebuffer < 5 * 1024 * 1024: + self.site.log.debug("%s: Increasing bigfile buffer size to 5MB..." 
% self.inner_path) + self.prebuffer = 5 * 1024 * 1024 + + return self.f.read(buff) + + def seek(self, pos, whence=0): + with self.read_lock: + if whence == 2: # Relative from file end + pos = self.size + pos # Use the real size instead of size on the disk + whence = 0 + return self.f.seek(pos, whence) + + def tell(self): + return self.f.tell() + + def close(self): + self.f.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +@PluginManager.registerTo("WorkerManager") +class WorkerManagerPlugin(object): + def addTask(self, inner_path, *args, **kwargs): + file_info = kwargs.get("file_info") + if file_info and "piecemap" in file_info: # Bigfile + self.site.settings["has_bigfile"] = True + + piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"] + piecemap_task = None + if not self.site.storage.isFile(piecemap_inner_path): + # Start download piecemap + piecemap_task = super(WorkerManagerPlugin, self).addTask(piecemap_inner_path, priority=30) + autodownload_bigfile_size_limit = self.site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit) + if "|" not in inner_path and self.site.isDownloadable(inner_path) and file_info["size"] / 1024 / 1024 <= autodownload_bigfile_size_limit: + gevent.spawn_later(0.1, self.site.needFile, inner_path + "|all") # Download all pieces + + if "|" in inner_path: + # Start download piece + task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs) + + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + task["piece_i"] = int(pos_from / file_info["piece_size"]) + task["sha512"] = file_info["sha512"] + else: + if inner_path in self.site.bad_files: + del self.site.bad_files[inner_path] + if piecemap_task: + task = piecemap_task + else: + fake_evt = gevent.event.AsyncResult() # Don't download anything if no range specified + fake_evt.set(True) + task = {"evt": fake_evt} + + if not self.site.storage.isFile(inner_path): + self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"]) + piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"])) + self.site.storage.piecefields[file_info["sha512"]].frombytes(b"\x00" * piece_num) + else: + task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs) + return task + + def taskAddPeer(self, task, peer): + if "piece_i" in task: + if not peer.piecefields[task["sha512"]][task["piece_i"]]: + if task["sha512"] not in peer.piecefields: + gevent.spawn(peer.updatePiecefields, force=True) + elif not task["peers"]: + gevent.spawn(peer.updatePiecefields) + + return False # Deny to add peers to task if file not in piecefield + return super(WorkerManagerPlugin, self).taskAddPeer(task, peer) + + +@PluginManager.registerTo("FileRequest") +class FileRequestPlugin(object): + def isReadable(self, site, inner_path, file, pos): + # Peek into file + if file.read(10) == b"\0" * 10: + # Looks empty, but makes sures we don't have that piece + file_info = site.content_manager.getFileInfo(inner_path) + if "piece_size" in file_info: + piece_i = int(pos / file_info["piece_size"]) + if not site.storage.piecefields[file_info["sha512"]][piece_i]: + return False + # Seek back to position we want to read + file.seek(pos) + return super(FileRequestPlugin, self).isReadable(site, inner_path, file, pos) + + def actionGetPiecefields(self, params): + site = self.sites.get(params["site"]) + if not site or 
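BigFile.read() above turns an ordinary read into piece downloads: every piece overlapping [pos, pos + buff) that is missing from the piecefield is requested as inner_path|pos_from-pos_to, and the read blocks until those requests complete. The loop below restates just that mapping in isolation; the inner path, file size and the set of already-downloaded pieces are made-up, chosen to mirror the "read overflows into blocks 6 and 7" case exercised by the tests later in this diff.

    piece_size = 1024 * 1024
    size = 10 * 1000 * 1000               # same nominal size as the test bigfile
    have = {5}                            # pretend only piece 5 is on disk

    def rangesToRequest(pos, buff):
        read_until = min(size, pos + buff)
        requests = []
        while pos < read_until:
            piece_i = pos // piece_size
            pos_from = piece_i * piece_size
            pos_to = pos_from + piece_size
            if piece_i not in have:       # missing piece -> would call site.needFile()
                requests.append("data/example.iso|%s-%s" % (pos_from, pos_to))
            pos += piece_size
        return requests

    # A 3MB read starting inside piece 5 only has to fetch pieces 6 and 7.
    print(rangesToRequest(5 * piece_size, 3 * piece_size))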
not site.isServing(): # Site unknown or not serving + self.response({"error": "Unknown site"}) + return False + + # Add peer to site if not added before + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) + if not peer.connection: # Just added + peer.connect(self.connection) # Assign current connection to peer + + piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.items()} + self.response({"piecefields_packed": piecefields_packed}) + + def actionSetPiecefields(self, params): + site = self.sites.get(params["site"]) + if not site or not site.isServing(): # Site unknown or not serving + self.response({"error": "Unknown site"}) + self.connection.badAction(5) + return False + + # Add or get peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection) + if not peer.connection: + peer.connect(self.connection) + + peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked) + for sha512, piecefield_packed in params["piecefields_packed"].items(): + peer.piecefields[sha512].unpack(piecefield_packed) + site.settings["has_bigfile"] = True + + self.response({"ok": "Updated"}) + + +@PluginManager.registerTo("Peer") +class PeerPlugin(object): + def __getattr__(self, key): + if key == "piecefields": + self.piecefields = collections.defaultdict(BigfilePiecefieldPacked) + return self.piecefields + elif key == "time_piecefields_updated": + self.time_piecefields_updated = None + return self.time_piecefields_updated + else: + return super(PeerPlugin, self).__getattr__(key) + + @util.Noparallel(ignore_args=True) + def updatePiecefields(self, force=False): + if self.connection and self.connection.handshake.get("rev", 0) < 2190: + return False # Not supported + + # Don't update piecefield again in 1 min + if self.time_piecefields_updated and time.time() - self.time_piecefields_updated < 60 and not force: + return False + + self.time_piecefields_updated = time.time() + res = self.request("getPiecefields", {"site": self.site.address}) + if not res or "error" in res: + return False + + self.piecefields = collections.defaultdict(BigfilePiecefieldPacked) + try: + for sha512, piecefield_packed in res["piecefields_packed"].items(): + self.piecefields[sha512].unpack(piecefield_packed) + except Exception as err: + self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err)) + + return self.piecefields + + def sendMyHashfield(self, *args, **kwargs): + return super(PeerPlugin, self).sendMyHashfield(*args, **kwargs) + + def updateHashfield(self, *args, **kwargs): + if self.site.settings.get("has_bigfile"): + thread = gevent.spawn(self.updatePiecefields, *args, **kwargs) + back = super(PeerPlugin, self).updateHashfield(*args, **kwargs) + thread.join() + return back + else: + return super(PeerPlugin, self).updateHashfield(*args, **kwargs) + + def getFile(self, site, inner_path, *args, **kwargs): + if "|" in inner_path: + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + kwargs["pos_from"] = pos_from + kwargs["pos_to"] = pos_to + return super(PeerPlugin, self).getFile(site, inner_path, *args, **kwargs) + + +@PluginManager.registerTo("Site") +class SitePlugin(object): + def isFileDownloadAllowed(self, inner_path, file_info): + if "piecemap" in file_info: + file_size_mb = file_info["size"] / 1024 / 1024 + if config.bigfile_size_limit and file_size_mb > config.bigfile_size_limit: + self.log.debug( + "Bigfile size %s too 
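The getPiecefields/setPiecefields handlers and Peer.updatePiecefields above exchange one packed piecefield per file sha512 under the "piecefields_packed" key, throttled to once a minute and only for peers reporting rev >= 2190. The sketch below only illustrates that payload shape; the packing is a plain one-byte-per-piece stand-in rather than BigfilePiecefieldPacked's actual encoding, and the sha512 key is a placeholder.

    import collections

    def makeGetPiecefieldsResponse(storage_piecefields):
        # Server side: one packed field per file hash.
        return {"piecefields_packed": dict(storage_piecefields)}

    def applyPiecefieldsResponse(res):
        # Client side: rebuild a per-peer mapping keyed by sha512.
        piecefields = collections.defaultdict(bytearray)
        for sha512, packed in res["piecefields_packed"].items():
            piecefields[sha512] = bytearray(packed)
        return piecefields

    local = {"sha512-of-bigfile": b"\x01" * 10}   # stand-in: all 10 pieces present
    print(applyPiecefieldsResponse(makeGetPiecefieldsResponse(local))["sha512-of-bigfile"])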
large: %sMB > %sMB, skipping..." % + (inner_path, file_size_mb, config.bigfile_size_limit) + ) + return False + + file_info = file_info.copy() + file_info["size"] = file_info["piece_size"] + return super(SitePlugin, self).isFileDownloadAllowed(inner_path, file_info) + + def getSettingsCache(self): + back = super(SitePlugin, self).getSettingsCache() + if self.storage.piecefields: + back["piecefields"] = {sha512: base64.b64encode(piecefield.pack()).decode("utf8") for sha512, piecefield in self.storage.piecefields.items()} + return back + + def needFile(self, inner_path, *args, **kwargs): + if inner_path.endswith("|all"): + @util.Pooled(20) + def pooledNeedBigfile(inner_path, *args, **kwargs): + if inner_path not in self.bad_files: + self.log.debug("Cancelled piece, skipping %s" % inner_path) + return False + return self.needFile(inner_path, *args, **kwargs) + + inner_path = inner_path.replace("|all", "") + file_info = self.needFileInfo(inner_path) + file_size = file_info["size"] + piece_size = file_info["piece_size"] + + piece_num = int(math.ceil(float(file_size) / piece_size)) + + file_threads = [] + + piecefield = self.storage.piecefields.get(file_info["sha512"]) + + for piece_i in range(piece_num): + piece_from = piece_i * piece_size + piece_to = min(file_size, piece_from + piece_size) + if not piecefield or not piecefield[piece_i]: + inner_path_piece = "%s|%s-%s" % (inner_path, piece_from, piece_to) + self.bad_files[inner_path_piece] = self.bad_files.get(inner_path_piece, 1) + res = pooledNeedBigfile(inner_path_piece, blocking=False) + if res is not True and res is not False: + file_threads.append(res) + gevent.joinall(file_threads) + else: + return super(SitePlugin, self).needFile(inner_path, *args, **kwargs) + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("Bigfile plugin") + group.add_argument('--autodownload_bigfile_size_limit', help='Also download bigfiles smaller than this limit if help distribute option is checked', default=1, metavar="MB", type=int) + group.add_argument('--bigfile_size_limit', help='Maximum size of downloaded big files', default=False, metavar="MB", type=int) + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/Bigfile/Test/TestBigfile.py b/plugins/Bigfile/Test/TestBigfile.py new file mode 100644 index 00000000..7d112860 --- /dev/null +++ b/plugins/Bigfile/Test/TestBigfile.py @@ -0,0 +1,574 @@ +import time +import io +import binascii + +import pytest +import mock + +from Connection import ConnectionServer +from Content.ContentManager import VerifyError +from File import FileServer +from File import FileRequest +from Worker import WorkerManager +from Peer import Peer +from Bigfile import BigfilePiecefield, BigfilePiecefieldPacked +from Test import Spy +from util import Msgpack + + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestBigfile: + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" + piece_size = 1024 * 1024 + + def createBigfile(self, site, inner_path="data/optional.any.iso", pieces=10): + f = site.storage.open(inner_path, "w") + for i in range(pieces * 100): + f.write(("Test%s" % i).ljust(10, "-") * 1000) + f.close() + assert site.content_manager.sign("content.json", self.privatekey) + return inner_path + + def testPiecemapCreate(self, site): + inner_path = self.createBigfile(site) + content = site.storage.loadJson("content.json") + assert "data/optional.any.iso" in 
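getSettingsCache() above persists piecefields across restarts by base64-encoding each packed field into the site's settings cache, and SiteStoragePlugin.__init__ decodes them again on load. A minimal round-trip of that encoding follows, using a plain one-byte-per-piece stand-in instead of BigfilePiecefield.pack() and a placeholder sha512 key.

    import base64
    import json

    piecefield = b"\x00" * 5 + b"\x01" + b"\x00" * 3 + b"\x01"   # have pieces 5 and 9

    cache = {"piecefields": {"sha512-of-bigfile": base64.b64encode(piecefield).decode("utf8")}}
    stored = json.dumps(cache)                                    # JSON-safe form for the settings cache

    restored = base64.b64decode(json.loads(stored)["piecefields"]["sha512-of-bigfile"])
    assert restored == piecefield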
content["files_optional"] + file_node = content["files_optional"][inner_path] + assert file_node["size"] == 10 * 1000 * 1000 + assert file_node["sha512"] == "47a72cde3be80b4a829e7674f72b7c6878cf6a70b0c58c6aa6c17d7e9948daf6" + assert file_node["piecemap"] == inner_path + ".piecemap.msgpack" + + piecemap = Msgpack.unpack(site.storage.open(file_node["piecemap"], "rb").read())["optional.any.iso"] + assert len(piecemap["sha512_pieces"]) == 10 + assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1] + assert binascii.hexlify(piecemap["sha512_pieces"][0]) == b"a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3" + + def testVerifyPiece(self, site): + inner_path = self.createBigfile(site) + + # Verify all 10 piece + f = site.storage.open(inner_path, "rb") + for i in range(10): + piece = io.BytesIO(f.read(1024 * 1024)) + piece.seek(0) + site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) + f.close() + + # Try to verify piece 0 with piece 1 hash + with pytest.raises(VerifyError) as err: + i = 1 + f = site.storage.open(inner_path, "rb") + piece = io.BytesIO(f.read(1024 * 1024)) + f.close() + site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) + assert "Invalid hash" in str(err) + + def testSparseFile(self, site): + inner_path = "sparsefile" + + # Create a 100MB sparse file + site.storage.createSparseFile(inner_path, 100 * 1024 * 1024) + + # Write to file beginning + s = time.time() + f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), b"hellostart" * 1024) + time_write_start = time.time() - s + + # Write to file end + s = time.time() + f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), b"helloend" * 1024) + time_write_end = time.time() - s + + # Verify writes + f = site.storage.open(inner_path) + assert f.read(10) == b"hellostart" + f.seek(99 * 1024 * 1024) + assert f.read(8) == b"helloend" + f.close() + + site.storage.delete(inner_path) + + # Writing to end shold not take much longer, than writing to start + assert time_write_end <= max(0.1, time_write_start * 1.1) + + def testRangedFileRequest(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + file_server.sites[site.address] = site + client = FileServer(file_server.ip, 1545) + client.sites[site_temp.address] = site_temp + site_temp.connection_server = client + connection = client.getConnection(file_server.ip, 1544) + + # Add file_server as peer to client + peer_file_server = site_temp.addPeer(file_server.ip, 1544) + + buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) + + assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size + assert buff.getvalue().startswith(b"Test524") # Correct data + buff.seek(0) + assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash + + connection.close() + client.stop() + + def testRangedFileDownload(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Make sure the file and the piecemap in the optional hashfield + file_info = site.content_manager.getFileInfo(inner_path) + assert site.content_manager.hashfield.hasHash(file_info["sha512"]) + + piecemap_hash = site.content_manager.getFileInfo(file_info["piecemap"])["sha512"] + assert site.content_manager.hashfield.hasHash(piecemap_hash) + + # Init client server + client = 
ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + peer_client = site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] + assert not bad_files + + # client_piecefield = peer_client.piecefields[file_info["sha512"]].tostring() + # assert client_piecefield == "1" * 10 + + # Download 5. and 10. block + + site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) + site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024)) + + # Verify 0. block not downloaded + f = site_temp.storage.open(inner_path) + assert f.read(10) == b"\0" * 10 + # Verify 5. and 10. block downloaded + f.seek(5 * 1024 * 1024) + assert f.read(7) == b"Test524" + f.seek(9 * 1024 * 1024) + assert f.read(7) == b"943---T" + + # Verify hashfield + assert set(site_temp.content_manager.hashfield) == set([18343, 43727]) # 18343: data/optional.any.iso, 43727: data/optional.any.iso.hashmap.msgpack + + def testOpenBigfile(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + with site_temp.storage.openBigfile(inner_path) as f: + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) + assert f.read(7) == b"Test524" + + f.seek(9 * 1024 * 1024) + assert f.read(7) == b"943---T" + + assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces + + assert set(site_temp.content_manager.hashfield) == set([18343, 43727]) + + assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001" + assert f.sha512 in site_temp.getSettingsCache()["piecefields"] + + # Test requesting already downloaded + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) + assert f.read(7) == b"Test524" + + assert len(requests) == 0 + + # Test requesting multi-block overflow reads + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) # We already have this block + data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. 
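The magic numbers 18343 and 43727 asserted above are hashfield ids, which appear to be the first two bytes of a file's sha512 read as an integer; the arithmetic at least matches the sha512 shown in testPiecemapCreate for data/optional.any.iso (0x47a7 == 18343), while 43727 (0xaacf) would belong to the piecemap file, whose hash is not printed in this diff. A one-line check of the first value, with the hash copied from the test above:

    sha512 = "47a72cde3be80b4a829e7674f72b7c6878cf6a70b0c58c6aa6c17d7e9948daf6"
    print(int(sha512[0:4], 16))    # 18343 -> hashfield id of data/optional.any.iso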
block + assert data.startswith(b"Test524") + assert data.endswith(b"Test838-") + assert b"\0" not in data # No null bytes allowed + + assert len(requests) == 2 # Two block download + + # Test out of range request + f.seek(5 * 1024 * 1024) + data = f.read(1024 * 1024 * 30) + assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024) + + f.seek(30 * 1024 * 1024) + data = f.read(1024 * 1024 * 30) + assert len(data) == 0 + + @pytest.mark.parametrize("piecefield_obj", [BigfilePiecefield, BigfilePiecefieldPacked]) + def testPiecefield(self, piecefield_obj, site): + testdatas = [ + b"\x01" * 100 + b"\x00" * 900 + b"\x01" * 4000 + b"\x00" * 4999 + b"\x01", + b"\x00\x01\x00\x01\x00\x01" * 10 + b"\x00\x01" * 90 + b"\x01\x00" * 400 + b"\x00" * 4999, + b"\x01" * 10000, + b"\x00" * 10000 + ] + for testdata in testdatas: + piecefield = piecefield_obj() + + piecefield.frombytes(testdata) + assert piecefield.tobytes() == testdata + assert piecefield[0] == testdata[0] + assert piecefield[100] == testdata[100] + assert piecefield[1000] == testdata[1000] + assert piecefield[len(testdata) - 1] == testdata[len(testdata) - 1] + + packed = piecefield.pack() + piecefield_new = piecefield_obj() + piecefield_new.unpack(packed) + assert piecefield.tobytes() == piecefield_new.tobytes() + assert piecefield_new.tobytes() == testdata + + def testFileGet(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + site_temp.connection_server = FileServer(file_server.ip, 1545) + site_temp.connection_server.sites[site_temp.address] = site_temp + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Download second block + with site_temp.storage.openBigfile(inner_path) as f: + f.seek(1024 * 1024) + assert f.read(1024)[0:1] != b"\0" + + # Make sure first block not download + with site_temp.storage.open(inner_path) as f: + assert f.read(1024)[0:1] == b"\0" + + peer2 = site.addPeer(file_server.ip, 1545, return_peer=True) + + # Should drop error on first block request + assert not peer2.getFile(site.address, "%s|0-%s" % (inner_path, 1024 * 1024 * 1)) + + # Should not drop error for second block request + assert peer2.getFile(site.address, "%s|%s-%s" % (inner_path, 1024 * 1024 * 1, 1024 * 1024 * 2)) + + def benchmarkPeerMemory(self, site, file_server): + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + import psutil, os + meminfo = psutil.Process(os.getpid()).memory_info + + mem_s = meminfo()[0] + s = time.time() + for i in range(25000): + site.addPeer(file_server.ip, i) + print("%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024)) # 0.082s MEM: + 6800KB + print(list(site.peers.values())[0].piecefields) + + def testUpdatePiecefield(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + server1 = file_server + server1.sites[site.address] = site + server2 = FileServer(file_server.ip, 1545) + server2.sites[site_temp.address] = site_temp + site_temp.connection_server = server2 + + # Add file_server as peer to client + server2_peer1 = site_temp.addPeer(file_server.ip, 1544) + + # Testing piecefield sync + assert len(server2_peer1.piecefields) == 0 + assert server2_peer1.updatePiecefields() # Query piecefields from peer + assert len(server2_peer1.piecefields) > 0 + + def testWorkerManagerPiecefieldDeny(self, file_server, 
site, site_temp): + inner_path = self.createBigfile(site) + + server1 = file_server + server1.sites[site.address] = site + server2 = FileServer(file_server.ip, 1545) + server2.sites[site_temp.address] = site_temp + site_temp.connection_server = server2 + + # Add file_server as peer to client + server2_peer1 = site_temp.addPeer(file_server.ip, 1544) # Working + + site_temp.downloadContent("content.json", download_files=False) + site_temp.needFile("data/optional.any.iso.piecemap.msgpack") + + # Add fake peers with optional files downloaded + for i in range(5): + fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544) + fake_peer.hashfield = site.content_manager.hashfield + fake_peer.has_hashfield = True + + with Spy.Spy(WorkerManager, "addWorker") as requests: + site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) + site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024)) + + # It should only request parts from peer1 as the other peers does not have the requested parts in piecefields + assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0 + + def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + server1 = file_server + server1.sites[site.address] = site + server2 = FileServer(file_server.ip, 1545) + server2.sites[site_temp.address] = site_temp + site_temp.connection_server = server2 + sha512 = site.content_manager.getFileInfo(inner_path)["sha512"] + + # Create 10 fake peer for each piece + for i in range(10): + peer = Peer(file_server.ip, 1544, site_temp, server2) + peer.piecefields[sha512][i] = b"\x01" + peer.updateHashfield = mock.MagicMock(return_value=False) + peer.updatePiecefields = mock.MagicMock(return_value=False) + peer.findHashIds = mock.MagicMock(return_value={"nope": []}) + peer.hashfield = site.content_manager.hashfield + peer.has_hashfield = True + peer.key = "Peer:%s" % i + site_temp.peers["Peer:%s" % i] = peer + + site_temp.downloadContent("content.json", download_files=False) + site_temp.needFile("data/optional.any.iso.piecemap.msgpack") + + with Spy.Spy(Peer, "getFile") as requests: + for i in range(10): + site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024)) + + assert len(requests) == 10 + for i in range(10): + assert requests[i][0] == site_temp.peers["Peer:%s" % i] # Every part should be requested from piece owner peer + + def testDownloadStats(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + # Check size before downloads + assert site_temp.settings["size"] < 10 * 1024 * 1024 + assert site_temp.settings["optional_downloaded"] == 0 + size_piecemap = site_temp.content_manager.getFileInfo(inner_path + ".piecemap.msgpack")["size"] + size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"] + + with site_temp.storage.openBigfile(inner_path) as f: + assert b"\0" not in f.read(1024) + assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile + + with site_temp.storage.openBigfile(inner_path) as f: + # Don't count twice + assert 
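The piecefield-aware peer selection being tested here comes from taskAddPeer() above: a peer is only assigned to a piece task if its advertised piecefield has that piece, otherwise a piecefield refresh is scheduled and the peer is rejected. A standalone restatement of that rule, using made-up peer names and a stand-in list-based piecefield:

    peers = {
        "peer_a": {"sha512-of-bigfile": [1, 0, 0]},   # has piece 0 only (stand-in piecefield)
        "peer_b": {"sha512-of-bigfile": [0, 1, 1]},   # has pieces 1 and 2
    }

    def candidatePeers(task_sha512, piece_i):
        # Keep only peers whose piecefield reports the requested piece.
        return [
            name for name, fields in peers.items()
            if fields.get(task_sha512, [])[piece_i:piece_i + 1] == [1]
        ]

    print(candidatePeers("sha512-of-bigfile", 0))   # ['peer_a']
    print(candidatePeers("sha512-of-bigfile", 2))   # ['peer_b']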
b"\0" not in f.read(1024) + assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile + + # Add second block + assert b"\0" not in f.read(1024 * 1024) + assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile + + def testPrebuffer(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f: + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) + assert f.read(7) == b"Test524" + # assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces + assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2 + + time.sleep(0.5) # Wait prebuffer download + + sha512 = site.content_manager.getFileInfo(inner_path)["sha512"] + assert site_temp.storage.piecefields[sha512].tostring() == "0000011100" + + # No prebuffer beyond end of the file + f.seek(9 * 1024 * 1024) + assert b"\0" not in f.read(7) + + assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0 + + def testDownloadAllPieces(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + with Spy.Spy(FileRequest, "route") as requests: + site_temp.needFile("%s|all" % inner_path) + + assert len(requests) == 12 # piecemap.msgpack, getPiecefields, 10 x piece + + # Don't re-download already got pieces + with Spy.Spy(FileRequest, "route") as requests: + site_temp.needFile("%s|all" % inner_path) + + assert len(requests) == 0 + + def testFileSize(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + # Download first block + site_temp.needFile("%s|%s-%s" % (inner_path, 0 * 1024 * 1024, 1 * 1024 * 1024)) + assert site_temp.storage.getSize(inner_path) < 1000 * 1000 * 10 # Size on the disk should be smaller than the real size + + site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024)) + assert site_temp.storage.getSize(inner_path) == site.storage.getSize(inner_path) + + def testFileRename(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client 
server + site_temp.connection_server = FileServer(file_server.ip, 1545) + site_temp.connection_server.sites[site_temp.address] = site_temp + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + with Spy.Spy(FileRequest, "route") as requests: + site_temp.needFile("%s|%s-%s" % (inner_path, 0, 1 * self.piece_size)) + + assert len([req for req in requests if req[1] == "streamFile"]) == 2 # 1 piece + piecemap + + # Rename the file + inner_path_new = inner_path.replace(".iso", "-new.iso") + site.storage.rename(inner_path, inner_path_new) + site.storage.delete("data/optional.any.iso.piecemap.msgpack") + assert site.content_manager.sign("content.json", self.privatekey, remove_missing_optional=True) + + files_optional = site.content_manager.contents["content.json"]["files_optional"].keys() + + assert "data/optional.any-new.iso.piecemap.msgpack" in files_optional + assert "data/optional.any.iso.piecemap.msgpack" not in files_optional + assert "data/optional.any.iso" not in files_optional + + with Spy.Spy(FileRequest, "route") as requests: + site.publish() + time.sleep(0.1) + site_temp.download(blind_includes=True).join(timeout=5) # Wait for download + + assert len([req[1] for req in requests if req[1] == "streamFile"]) == 0 + + with site_temp.storage.openBigfile(inner_path_new, prebuffer=0) as f: + f.read(1024) + + # First piece already downloaded + assert [req for req in requests if req[1] == "streamFile"] == [] + + # Second piece needs to be downloaded + changed piecemap + f.seek(self.piece_size) + f.read(1024) + assert [req[3]["inner_path"] for req in requests if req[1] == "streamFile"] == [inner_path_new + ".piecemap.msgpack", inner_path_new] + + @pytest.mark.parametrize("size", [1024 * 3, 1024 * 1024 * 3, 1024 * 1024 * 30]) + def testNullFileRead(self, file_server, site, site_temp, size): + inner_path = "data/optional.iso" + + f = site.storage.open(inner_path, "w") + f.write("\0" * size) + f.close() + assert site.content_manager.sign("content.json", self.privatekey) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + site_temp.connection_server = FileServer(file_server.ip, 1545) + site_temp.connection_server.sites[site_temp.address] = site_temp + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + if "piecemap" in site.content_manager.getFileInfo(inner_path): # Bigfile + site_temp.needFile(inner_path + "|all") + else: + site_temp.needFile(inner_path) + + + assert site_temp.storage.getSize(inner_path) == size diff --git a/plugins/Bigfile/Test/conftest.py b/plugins/Bigfile/Test/conftest.py new file mode 100644 index 00000000..634e66e2 --- /dev/null +++ b/plugins/Bigfile/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * diff --git a/plugins/Bigfile/Test/pytest.ini b/plugins/Bigfile/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/Bigfile/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/Bigfile/__init__.py b/plugins/Bigfile/__init__.py new file mode 100644 index 00000000..cf2dcb49 --- /dev/null +++ b/plugins/Bigfile/__init__.py @@ -0,0 +1,2 @@ +from . 
import BigfilePlugin +from .BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked \ No newline at end of file diff --git a/plugins/Chart/ChartCollector.py b/plugins/Chart/ChartCollector.py new file mode 100644 index 00000000..776343af --- /dev/null +++ b/plugins/Chart/ChartCollector.py @@ -0,0 +1,181 @@ +import time +import sys +import collections +import itertools +import logging + +import gevent +from util import helper +from Config import config + + +class ChartCollector(object): + def __init__(self, db): + self.db = db + if config.action == "main": + gevent.spawn_later(60 * 3, self.collector) + self.log = logging.getLogger("ChartCollector") + self.last_values = collections.defaultdict(dict) + + def setInitialLastValues(self, sites): + # Recover last value of site bytes/sent + for site in sites: + self.last_values["site:" + site.address]["site_bytes_recv"] = site.settings.get("bytes_recv", 0) + self.last_values["site:" + site.address]["site_bytes_sent"] = site.settings.get("bytes_sent", 0) + + def getCollectors(self): + collectors = {} + import main + file_server = main.file_server + sites = file_server.sites + if not sites: + return collectors + content_db = list(sites.values())[0].content_manager.contents.db + + # Connection stats + collectors["connection"] = lambda: len(file_server.connections) + collectors["connection_in"] = ( + lambda: len([1 for connection in file_server.connections if connection.type == "in"]) + ) + collectors["connection_onion"] = ( + lambda: len([1 for connection in file_server.connections if connection.ip.endswith(".onion")]) + ) + collectors["connection_ping_avg"] = ( + lambda: round(1000 * helper.avg( + [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay] + )) + ) + collectors["connection_ping_min"] = ( + lambda: round(1000 * min( + [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay] + )) + ) + collectors["connection_rev_avg"] = ( + lambda: helper.avg( + [connection.handshake["rev"] for connection in file_server.connections if connection.handshake] + ) + ) + + # Request stats + collectors["file_bytes_recv|change"] = lambda: file_server.bytes_recv + collectors["file_bytes_sent|change"] = lambda: file_server.bytes_sent + collectors["request_num_recv|change"] = lambda: file_server.num_recv + collectors["request_num_sent|change"] = lambda: file_server.num_sent + + # Limit + collectors["optional_limit"] = lambda: content_db.getOptionalLimitBytes() + collectors["optional_used"] = lambda: content_db.getOptionalUsedBytes() + collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()]) + + # Peers + collectors["peer"] = lambda peers: len(peers) + collectors["peer_onion"] = lambda peers: len([True for peer in peers if ".onion" in peer]) + + # Size + collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()]) + collectors["size_optional"] = lambda: sum([site.settings.get("size_optional", 0) for site in sites.values()]) + collectors["content"] = lambda: sum([len(site.content_manager.contents) for site in sites.values()]) + + return collectors + + def getSiteCollectors(self): + site_collectors = {} + + # Size + site_collectors["site_size"] = lambda site: site.settings.get("size", 0) + site_collectors["site_size_optional"] = lambda site: site.settings.get("size_optional", 0) + site_collectors["site_optional_downloaded"] = lambda site: 
site.settings.get("optional_downloaded", 0) + site_collectors["site_content"] = lambda site: len(site.content_manager.contents) + + # Data transfer + site_collectors["site_bytes_recv|change"] = lambda site: site.settings.get("bytes_recv", 0) + site_collectors["site_bytes_sent|change"] = lambda site: site.settings.get("bytes_sent", 0) + + # Peers + site_collectors["site_peer"] = lambda site: len(site.peers) + site_collectors["site_peer_onion"] = lambda site: len( + [True for peer in site.peers.values() if peer.ip.endswith(".onion")] + ) + site_collectors["site_peer_connected"] = lambda site: len([True for peer in site.peers.values() if peer.connection]) + + return site_collectors + + def getUniquePeers(self): + import main + sites = main.file_server.sites + return set(itertools.chain.from_iterable( + [site.peers.keys() for site in sites.values()] + )) + + def collectDatas(self, collectors, last_values, site=None): + if site is None: + peers = self.getUniquePeers() + datas = {} + for key, collector in collectors.items(): + try: + if site: + value = collector(site) + elif key.startswith("peer"): + value = collector(peers) + else: + value = collector() + except Exception as err: + self.log.info("Collector %s error: %s" % (key, err)) + value = None + + if "|change" in key: # Store changes relative to last value + key = key.replace("|change", "") + last_value = last_values.get(key, 0) + last_values[key] = value + value = value - last_value + + if value is None: + datas[key] = None + else: + datas[key] = round(value, 3) + return datas + + def collectGlobal(self, collectors, last_values): + now = int(time.time()) + s = time.time() + datas = self.collectDatas(collectors, last_values["global"]) + values = [] + for key, value in datas.items(): + values.append((self.db.getTypeId(key), value, now)) + self.log.debug("Global collectors done in %.3fs" % (time.time() - s)) + + s = time.time() + cur = self.db.getCursor() + cur.cursor.executemany("INSERT INTO data (type_id, value, date_added) VALUES (?, ?, ?)", values) + cur.close() + self.log.debug("Global collectors inserted in %.3fs" % (time.time() - s)) + + def collectSites(self, sites, collectors, last_values): + now = int(time.time()) + s = time.time() + values = [] + for address, site in sites.items(): + site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site) + for key, value in site_datas.items(): + values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now)) + time.sleep(0.001) + self.log.debug("Site collections done in %.3fs" % (time.time() - s)) + + s = time.time() + cur = self.db.getCursor() + cur.cursor.executemany("INSERT INTO data (type_id, site_id, value, date_added) VALUES (?, ?, ?, ?)", values) + cur.close() + self.log.debug("Site collectors inserted in %.3fs" % (time.time() - s)) + + def collector(self): + collectors = self.getCollectors() + site_collectors = self.getSiteCollectors() + import main + sites = main.file_server.sites + i = 0 + while 1: + self.collectGlobal(collectors, self.last_values) + if i % 12 == 0: # Only collect sites data every hour + self.collectSites(sites, site_collectors, self.last_values) + time.sleep(60 * 5) + i += 1 diff --git a/plugins/Chart/ChartDb.py b/plugins/Chart/ChartDb.py new file mode 100644 index 00000000..9dd4d3db --- /dev/null +++ b/plugins/Chart/ChartDb.py @@ -0,0 +1,133 @@ +from Config import config +from Db.Db import Db +import time + + +class ChartDb(Db): + def __init__(self): + self.version = 2 + super(ChartDb, self).__init__(self.getSchema(), 
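Collector keys ending in |change (file_bytes_recv, site_bytes_sent, ...) are sampled as ever-growing counters, but collectDatas() above stores only the delta since the previous collection round, keeping the raw counter in last_values. The snippet below restates that branch on its own; the key names come from the collectors above, the sample values are made-up.

    import collections

    last_values = collections.defaultdict(dict)

    def collect(key, value, scope="global"):
        if "|change" in key:
            key = key.replace("|change", "")
            previous = last_values[scope].get(key, 0)
            last_values[scope][key] = value      # remember the raw counter...
            value = value - previous             # ...but record only the delta
        return key, value

    print(collect("file_bytes_recv|change", 1000))   # ('file_bytes_recv', 1000)
    print(collect("file_bytes_recv|change", 1800))   # ('file_bytes_recv', 800)
    print(collect("peer", 42))                       # plain gauges pass through unchanged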
"%s/chart.db" % config.data_dir) + self.foreign_keys = True + self.checkTables() + self.sites = self.loadSites() + self.types = self.loadTypes() + + def getSchema(self): + schema = {} + schema["db_name"] = "Chart" + schema["tables"] = {} + schema["tables"]["data"] = { + "cols": [ + ["data_id", "INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE"], + ["type_id", "INTEGER NOT NULL"], + ["site_id", "INTEGER"], + ["value", "INTEGER"], + ["date_added", "DATETIME DEFAULT (CURRENT_TIMESTAMP)"] + ], + "indexes": [ + "CREATE INDEX site_id ON data (site_id)", + "CREATE INDEX date_added ON data (date_added)" + ], + "schema_changed": 2 + } + schema["tables"]["type"] = { + "cols": [ + ["type_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"], + ["name", "TEXT"] + ], + "schema_changed": 1 + } + schema["tables"]["site"] = { + "cols": [ + ["site_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"], + ["address", "TEXT"] + ], + "schema_changed": 1 + } + return schema + + def getTypeId(self, name): + if name not in self.types: + self.execute("INSERT INTO type ?", {"name": name}) + self.types[name] = self.cur.cursor.lastrowid + + return self.types[name] + + def getSiteId(self, address): + if address not in self.sites: + self.execute("INSERT INTO site ?", {"address": address}) + self.sites[address] = self.cur.cursor.lastrowid + + return self.sites[address] + + def loadSites(self): + sites = {} + for row in self.execute("SELECT * FROM site"): + sites[row["address"]] = row["site_id"] + return sites + + def loadTypes(self): + types = {} + for row in self.execute("SELECT * FROM type"): + types[row["name"]] = row["type_id"] + return types + + def deleteSite(self, address): + if address in self.sites: + site_id = self.sites[address] + del self.sites[address] + self.execute("DELETE FROM site WHERE ?", {"site_id": site_id}) + self.execute("DELETE FROM data WHERE ?", {"site_id": site_id}) + + def archive(self): + week_back = 1 + while 1: + s = time.time() + date_added_from = time.time() - 60 * 60 * 24 * 7 * (week_back + 1) + date_added_to = date_added_from + 60 * 60 * 24 * 7 + res = self.execute(""" + SELECT + MAX(date_added) AS date_added, + SUM(value) AS value, + GROUP_CONCAT(data_id) AS data_ids, + type_id, + site_id, + COUNT(*) AS num + FROM data + WHERE + site_id IS NULL AND + date_added > :date_added_from AND + date_added < :date_added_to + GROUP BY strftime('%Y-%m-%d %H', date_added, 'unixepoch', 'localtime'), type_id + """, {"date_added_from": date_added_from, "date_added_to": date_added_to}) + + num_archived = 0 + cur = self.getCursor() + for row in res: + if row["num"] == 1: + continue + cur.execute("INSERT INTO data ?", { + "type_id": row["type_id"], + "site_id": row["site_id"], + "value": row["value"], + "date_added": row["date_added"] + }) + cur.execute("DELETE FROM data WHERE data_id IN (%s)" % row["data_ids"]) + num_archived += row["num"] + self.log.debug("Archived %s data from %s weeks ago in %.3fs" % (num_archived, week_back, time.time() - s)) + week_back += 1 + time.sleep(0.1) + if num_archived == 0: + break + # Only keep 6 month of global stats + self.execute( + "DELETE FROM data WHERE site_id IS NULL AND date_added < :date_added_limit", + {"date_added_limit": time.time() - 60 * 60 * 24 * 30 * 6 } + ) + # Only keep 1 month of site stats + self.execute( + "DELETE FROM data WHERE site_id IS NOT NULL AND date_added < :date_added_limit", + {"date_added_limit": time.time() - 60 * 60 * 24 * 30 } + ) + if week_back > 1: + self.execute("VACUUM") diff --git a/plugins/Chart/ChartPlugin.py b/plugins/Chart/ChartPlugin.py 
new file mode 100644 index 00000000..ddc1e609 --- /dev/null +++ b/plugins/Chart/ChartPlugin.py @@ -0,0 +1,60 @@ +import time +import itertools + +import gevent + +from Config import config +from util import helper +from Plugin import PluginManager +from .ChartDb import ChartDb +from .ChartCollector import ChartCollector + +if "db" not in locals().keys(): # Share on reloads + db = ChartDb() + gevent.spawn_later(10 * 60, db.archive) + helper.timer(60 * 60 * 6, db.archive) + collector = ChartCollector(db) + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + def load(self, *args, **kwargs): + back = super(SiteManagerPlugin, self).load(*args, **kwargs) + collector.setInitialLastValues(self.sites.values()) + return back + + def delete(self, address, *args, **kwargs): + db.deleteSite(address) + return super(SiteManagerPlugin, self).delete(address, *args, **kwargs) + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def actionChartDbQuery(self, to, query, params=None): + if not "ADMIN" in self.permissions: + return {"error": "No permission"} + + if config.debug or config.verbose: + s = time.time() + rows = [] + try: + if not query.strip().upper().startswith("SELECT"): + raise Exception("Only SELECT query supported") + res = db.execute(query, params) + except Exception as err: # Response the error to client + self.log.error("ChartDbQuery error: %s" % err) + return {"error": str(err)} + # Convert result to dict + for row in res: + rows.append(dict(row)) + if config.verbose and time.time() - s > 0.1: # Log slow query + self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s)) + return rows + + def actionChartGetPeerLocations(self, to): + if not "ADMIN" in self.permissions: + return {"error": "No permission"} + + peers = {} + for site in self.server.sites.values(): + peers.update(site.peers) + peer_locations = self.getPeerLocations(peers) + return peer_locations diff --git a/plugins/Chart/__init__.py b/plugins/Chart/__init__.py new file mode 100644 index 00000000..2c284609 --- /dev/null +++ b/plugins/Chart/__init__.py @@ -0,0 +1 @@ +from . 
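actionChartDbQuery above only admits statements whose stripped, upper-cased text starts with SELECT; it is a simple prefix guard, not a SQL parser. The sketch below repeats that check and pairs it with the kind of query a chart frontend might send; the SQL is illustrative rather than taken from the diff, but it sticks to the data/type/site schema defined in ChartDb.getSchema().

    def isAllowedChartQuery(query):
        # Same prefix test as actionChartDbQuery; anything else is rejected.
        return query.strip().upper().startswith("SELECT")

    example = (
        "SELECT type.name, data.value, data.date_added "
        "FROM data JOIN type USING (type_id) "
        "WHERE site_id IS NULL ORDER BY date_added DESC LIMIT 144"
    )
    print(isAllowedChartQuery(example))               # True
    print(isAllowedChartQuery("DELETE FROM data"))    # False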
import ChartPlugin \ No newline at end of file diff --git a/plugins/ContentFilter/ContentFilterPlugin.py b/plugins/ContentFilter/ContentFilterPlugin.py new file mode 100644 index 00000000..f6d74e7a --- /dev/null +++ b/plugins/ContentFilter/ContentFilterPlugin.py @@ -0,0 +1,216 @@ +import time +import re +import html +import hashlib + +from Plugin import PluginManager +from Translate import Translate +from Config import config + +from .ContentFilterStorage import ContentFilterStorage + + +if "_" not in locals(): + _ = Translate("plugins/ContentFilter/languages/") + + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + def load(self, *args, **kwargs): + global filter_storage + super(SiteManagerPlugin, self).load(*args, **kwargs) + filter_storage = ContentFilterStorage(site_manager=self) + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + # Mute + def cbMuteAdd(self, to, auth_address, cert_user_id, reason): + filter_storage.file_content["mutes"][auth_address] = { + "cert_user_id": cert_user_id, "reason": reason, "source": self.site.address, "date_added": time.time() + } + filter_storage.save() + filter_storage.changeDbs(auth_address, "remove") + self.response(to, "ok") + + def actionMuteAdd(self, to, auth_address, cert_user_id, reason): + if "ADMIN" in self.getPermissions(to): + self.cbMuteAdd(to, auth_address, cert_user_id, reason) + else: + self.cmd( + "confirm", + [_["Hide all content from %s?"] % html.escape(cert_user_id), _["Mute"]], + lambda res: self.cbMuteAdd(to, auth_address, cert_user_id, reason) + ) + + def cbMuteRemove(self, to, auth_address): + del filter_storage.file_content["mutes"][auth_address] + filter_storage.save() + filter_storage.changeDbs(auth_address, "load") + self.response(to, "ok") + + def actionMuteRemove(self, to, auth_address): + if "ADMIN" in self.getPermissions(to): + self.cbMuteRemove(to, auth_address) + else: + self.cmd( + "confirm", + [_["Unmute %s?"] % html.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]], + lambda res: self.cbMuteRemove(to, auth_address) + ) + + def actionMuteList(self, to): + if "ADMIN" in self.getPermissions(to): + self.response(to, filter_storage.file_content["mutes"]) + else: + return self.response(to, {"error": "Forbidden: Only ADMIN sites can list mutes"}) + + # Siteblock + def actionSiteblockAdd(self, to, site_address, reason=None): + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can add to blocklist"}) + filter_storage.file_content["siteblocks"][site_address] = {"date_added": time.time(), "reason": reason} + filter_storage.save() + self.response(to, "ok") + + def actionSiteblockRemove(self, to, site_address): + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can remove from blocklist"}) + del filter_storage.file_content["siteblocks"][site_address] + filter_storage.save() + self.response(to, "ok") + + def actionSiteblockList(self, to): + if "ADMIN" in self.getPermissions(to): + self.response(to, filter_storage.file_content["siteblocks"]) + else: + return self.response(to, {"error": "Forbidden: Only ADMIN sites can list blocklists"}) + + # Include + def actionFilterIncludeAdd(self, to, inner_path, description=None, address=None): + if address: + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"}) + site = 
self.server.sites[address] + else: + address = self.site.address + site = self.site + + if "ADMIN" in self.getPermissions(to): + self.cbFilterIncludeAdd(to, True, address, inner_path, description) + else: + content = site.storage.loadJson(inner_path) + title = _["New shared global content filter: %s (%s sites, %s users)"] % ( + html.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {})) + ) + + self.cmd( + "confirm", + [title, "Add"], + lambda res: self.cbFilterIncludeAdd(to, res, address, inner_path, description) + ) + + def cbFilterIncludeAdd(self, to, res, address, inner_path, description): + if not res: + self.response(to, res) + return False + + filter_storage.includeAdd(address, inner_path, description) + self.response(to, "ok") + + def actionFilterIncludeRemove(self, to, inner_path, address=None): + if address: + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"}) + else: + address = self.site.address + + key = "%s/%s" % (address, inner_path) + if key not in filter_storage.file_content["includes"]: + self.response(to, {"error": "Include not found"}) + filter_storage.includeRemove(address, inner_path) + self.response(to, "ok") + + def actionFilterIncludeList(self, to, all_sites=False, filters=False): + if all_sites and "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can list all sites includes"}) + + back = [] + includes = filter_storage.file_content.get("includes", {}).values() + for include in includes: + if not all_sites and include["address"] != self.site.address: + continue + if filters: + include = dict(include) # Don't modify original file_content + include_site = filter_storage.site_manager.get(include["address"]) + if not include_site: + continue + content = include_site.storage.loadJson(include["inner_path"]) + include["mutes"] = content.get("mutes", {}) + include["siteblocks"] = content.get("siteblocks", {}) + back.append(include) + self.response(to, back) + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + def updateDbFile(self, inner_path, file=None, cur=None): + if file is not False: # File deletion always allowed + # Find for bitcoin addresses in file path + matches = re.findall("/(1[A-Za-z0-9]{26,35})/", inner_path) + # Check if any of the adresses are in the mute list + for auth_address in matches: + if filter_storage.isMuted(auth_address): + self.log.debug("Mute match: %s, ignoring %s" % (auth_address, inner_path)) + return False + + return super(SiteStoragePlugin, self).updateDbFile(inner_path, file=file, cur=cur) + + def onUpdated(self, inner_path, file=None): + file_path = "%s/%s" % (self.site.address, inner_path) + if file_path in filter_storage.file_content["includes"]: + self.log.debug("Filter file updated: %s" % inner_path) + filter_storage.includeUpdateAll() + return super(SiteStoragePlugin, self).onUpdated(inner_path, file=file) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def actionWrapper(self, path, extra_headers=None): + match = re.match("/(?P
[A-Za-z0-9\._-]+)(?P/.*|$)", path) + if not match: + return False + address = match.group("address") + + if self.server.site_manager.get(address): # Site already exists + return super(UiRequestPlugin, self).actionWrapper(path, extra_headers) + + if self.server.site_manager.isDomain(address): + address = self.server.site_manager.resolveDomain(address) + + if address: + address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest() + else: + address_sha256 = None + + if filter_storage.isSiteblocked(address) or filter_storage.isSiteblocked(address_sha256): + site = self.server.site_manager.get(config.homepage) + if not extra_headers: + extra_headers = {} + + script_nonce = self.getScriptNonce() + + self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) + return iter([super(UiRequestPlugin, self).renderWrapper( + site, path, "uimedia/plugins/contentfilter/blocklisted.html?address=" + address, + "Blacklisted site", extra_headers, show_loadingscreen=False, script_nonce=script_nonce + )]) + else: + return super(UiRequestPlugin, self).actionWrapper(path, extra_headers) + + def actionUiMedia(self, path, *args, **kwargs): + if path.startswith("/uimedia/plugins/contentfilter/"): + file_path = path.replace("/uimedia/plugins/contentfilter/", "plugins/ContentFilter/media/") + return self.actionFile(file_path) + else: + return super(UiRequestPlugin, self).actionUiMedia(path) diff --git a/plugins/ContentFilter/ContentFilterStorage.py b/plugins/ContentFilter/ContentFilterStorage.py new file mode 100644 index 00000000..3df0b435 --- /dev/null +++ b/plugins/ContentFilter/ContentFilterStorage.py @@ -0,0 +1,140 @@ +import os +import json +import logging +import collections +import time + +from Debug import Debug +from Plugin import PluginManager +from Config import config +from util import helper + +class ContentFilterStorage(object): + def __init__(self, site_manager): + self.log = logging.getLogger("ContentFilterStorage") + self.file_path = "%s/filters.json" % config.data_dir + self.site_manager = site_manager + self.file_content = self.load() + + # Set default values for filters.json + if not self.file_content: + self.file_content = {} + + # Site blacklist renamed to site blocks + if "site_blacklist" in self.file_content: + self.file_content["siteblocks"] = self.file_content["site_blacklist"] + del self.file_content["site_blacklist"] + + for key in ["mutes", "siteblocks", "includes"]: + if key not in self.file_content: + self.file_content[key] = {} + + self.include_filters = collections.defaultdict(set) # Merged list of mutes and blacklists from all include + self.includeUpdateAll(update_site_dbs=False) + + def load(self): + # Rename previously used mutes.json -> filters.json + if os.path.isfile("%s/mutes.json" % config.data_dir): + self.log.info("Renaming mutes.json to filters.json...") + os.rename("%s/mutes.json" % config.data_dir, self.file_path) + if os.path.isfile(self.file_path): + try: + return json.load(open(self.file_path)) + except Exception as err: + self.log.error("Error loading filters.json: %s" % err) + return None + else: + return None + + def includeUpdateAll(self, update_site_dbs=True): + s = time.time() + new_include_filters = collections.defaultdict(set) + + # Load all include files data into a merged set + for include_path in self.file_content["includes"]: + address, inner_path = include_path.split("/", 1) + try: + content = self.site_manager.get(address).storage.loadJson(inner_path) + except Exception as err: + self.log.warning( + "Error loading include %s: 
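actionWrapper above consults the blocklist twice: once with the plain site address and once with "0x" + sha256(address), so a shared filter list can block a site without publishing its address. A standalone version of that lookup; both addresses here are arbitrary examples, not real sites.

    import hashlib

    def hashAddress(address):
        return "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()

    # Blocklist entry stored only in hashed form (made-up address).
    siteblocks = {hashAddress("1ExampleBlockedSiteAddressxxxxxxxx"): {"reason": "example entry"}}

    def isSiteblocked(address):
        return address in siteblocks or hashAddress(address) in siteblocks

    print(isSiteblocked("1ExampleBlockedSiteAddressxxxxxxxx"))   # True, via the hashed entry
    print(isSiteblocked("1SomeOtherSiteAddressxxxxxxxxxxxxx"))   # False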
%s" % + (include_path, Debug.formatException(err)) + ) + continue + + for key, val in content.items(): + if type(val) is not dict: + continue + + new_include_filters[key].update(val.keys()) + + mutes_added = new_include_filters["mutes"].difference(self.include_filters["mutes"]) + mutes_removed = self.include_filters["mutes"].difference(new_include_filters["mutes"]) + + self.include_filters = new_include_filters + + if update_site_dbs: + for auth_address in mutes_added: + self.changeDbs(auth_address, "remove") + + for auth_address in mutes_removed: + if not self.isMuted(auth_address): + self.changeDbs(auth_address, "load") + + num_mutes = len(self.include_filters["mutes"]) + num_siteblocks = len(self.include_filters["siteblocks"]) + self.log.debug( + "Loaded %s mutes, %s blocked sites from %s includes in %.3fs" % + (num_mutes, num_siteblocks, len(self.file_content["includes"]), time.time() - s) + ) + + def includeAdd(self, address, inner_path, description=None): + self.file_content["includes"]["%s/%s" % (address, inner_path)] = { + "date_added": time.time(), + "address": address, + "description": description, + "inner_path": inner_path + } + self.includeUpdateAll() + self.save() + + def includeRemove(self, address, inner_path): + del self.file_content["includes"]["%s/%s" % (address, inner_path)] + self.includeUpdateAll() + self.save() + + def save(self): + s = time.time() + helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True).encode("utf8")) + self.log.debug("Saved in %.3fs" % (time.time() - s)) + + def isMuted(self, auth_address): + if auth_address in self.file_content["mutes"] or auth_address in self.include_filters["mutes"]: + return True + else: + return False + + def isSiteblocked(self, address): + if address in self.file_content["siteblocks"] or address in self.include_filters["siteblocks"]: + return True + else: + return False + + # Search and remove or readd files of an user + def changeDbs(self, auth_address, action): + self.log.debug("Mute action %s on user %s" % (action, auth_address)) + res = list(self.site_manager.list().values())[0].content_manager.contents.db.execute( + "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path", + {"inner_path": "%%/%s/%%" % auth_address} + ) + for row in res: + site = self.site_manager.sites.get(row["address"]) + if not site: + continue + dir_inner_path = helper.getDirname(row["inner_path"]) + for file_name in site.storage.walk(dir_inner_path): + if action == "remove": + site.storage.onUpdated(dir_inner_path + file_name, False) + else: + site.storage.onUpdated(dir_inner_path + file_name) + site.onFileDone(dir_inner_path + file_name) diff --git a/plugins/ContentFilter/Test/TestContentFilter.py b/plugins/ContentFilter/Test/TestContentFilter.py new file mode 100644 index 00000000..e1b37b16 --- /dev/null +++ b/plugins/ContentFilter/Test/TestContentFilter.py @@ -0,0 +1,82 @@ +import pytest +from ContentFilter import ContentFilterPlugin +from Site import SiteManager + + +@pytest.fixture +def filter_storage(): + ContentFilterPlugin.filter_storage = ContentFilterPlugin.ContentFilterStorage(SiteManager.site_manager) + return ContentFilterPlugin.filter_storage + + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestContentFilter: + def createInclude(self, site): + site.storage.writeJson("filters.json", { + "mutes": {"1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C": {}}, + "siteblocks": {site.address: {}} + }) + + def testIncludeLoad(self, site, 
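Mutes are enforced in SiteStoragePlugin.updateDbFile above by scanning the inner path for bitcoin-style auth addresses and skipping the database update when one of them is muted; changeDbs then uses an SQL LIKE on the same directory name to reload or drop the affected rows. The regex check in isolation: the muted address is the one used by the test fixtures in this file, the second path is an arbitrary example.

    import re

    mutes = {"1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C"}   # address from the test fixtures above

    def isPathMuted(inner_path):
        # User data lives under directories named after the author's auth address,
        # so matching the path is enough to decide whether to skip it.
        for auth_address in re.findall(r"/(1[A-Za-z0-9]{26,35})/", inner_path):
            if auth_address in mutes:
                return True
        return False

    print(isPathMuted("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/data.json"))   # True -> skip db update
    print(isPathMuted("data/users/1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2/data.json"))   # False (not muted)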
filter_storage): + self.createInclude(site) + filter_storage.file_content["includes"]["%s/%s" % (site.address, "filters.json")] = { + "date_added": 1528295893, + } + + assert not filter_storage.include_filters["mutes"] + assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert not filter_storage.isSiteblocked(site.address) + filter_storage.includeUpdateAll(update_site_dbs=False) + assert len(filter_storage.include_filters["mutes"]) == 1 + assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert filter_storage.isSiteblocked(site.address) + + def testIncludeAdd(self, site, filter_storage): + self.createInclude(site) + query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C'" + assert not filter_storage.isSiteblocked(site.address) + assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert site.storage.query(query_num_json).fetchone()["num"] == 2 + + # Add include + filter_storage.includeAdd(site.address, "filters.json") + + assert filter_storage.isSiteblocked(site.address) + assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert site.storage.query(query_num_json).fetchone()["num"] == 0 + + # Remove include + filter_storage.includeRemove(site.address, "filters.json") + + assert not filter_storage.isSiteblocked(site.address) + assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert site.storage.query(query_num_json).fetchone()["num"] == 2 + + def testIncludeChange(self, site, filter_storage): + self.createInclude(site) + filter_storage.includeAdd(site.address, "filters.json") + assert filter_storage.isSiteblocked(site.address) + assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + + # Add new blocked site + assert not filter_storage.isSiteblocked("1Hello") + + filter_content = site.storage.loadJson("filters.json") + filter_content["siteblocks"]["1Hello"] = {} + site.storage.writeJson("filters.json", filter_content) + + assert filter_storage.isSiteblocked("1Hello") + + # Add new muted user + query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q'" + assert not filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") + assert site.storage.query(query_num_json).fetchone()["num"] == 2 + + filter_content["mutes"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"] = {} + site.storage.writeJson("filters.json", filter_content) + + assert filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") + assert site.storage.query(query_num_json).fetchone()["num"] == 0 + + diff --git a/plugins/ContentFilter/Test/conftest.py b/plugins/ContentFilter/Test/conftest.py new file mode 100644 index 00000000..634e66e2 --- /dev/null +++ b/plugins/ContentFilter/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * diff --git a/plugins/ContentFilter/Test/pytest.ini b/plugins/ContentFilter/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/ContentFilter/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/ContentFilter/__init__.py b/plugins/ContentFilter/__init__.py new file mode 100644 index 00000000..2cbca8ee --- /dev/null +++ b/plugins/ContentFilter/__init__.py @@ -0,0 +1 @@ +from . 
import ContentFilterPlugin diff --git a/plugins/ContentFilter/languages/hu.json b/plugins/ContentFilter/languages/hu.json new file mode 100644 index 00000000..9b57e697 --- /dev/null +++ b/plugins/ContentFilter/languages/hu.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s tartalmaniak elrejtése?", + "Mute": "Elnémítás", + "Unmute %s?": "%s tartalmaniak megjelenítése?", + "Unmute": "Némítás visszavonása" +} diff --git a/plugins/ContentFilter/languages/it.json b/plugins/ContentFilter/languages/it.json new file mode 100644 index 00000000..9a2c6761 --- /dev/null +++ b/plugins/ContentFilter/languages/it.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s Vuoi nascondere i contenuti di questo utente ?", + "Mute": "Attiva Silenzia", + "Unmute %s?": "%s Vuoi mostrare i contenuti di questo utente ?", + "Unmute": "Disattiva Silenzia" +} diff --git a/plugins/ContentFilter/languages/pt-br.json b/plugins/ContentFilter/languages/pt-br.json new file mode 100644 index 00000000..3c6bfbdc --- /dev/null +++ b/plugins/ContentFilter/languages/pt-br.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s Ocultar todo o conteúdo de ?", + "Mute": "Ativar o Silêncio", + "Unmute %s?": "%s Você quer mostrar o conteúdo deste usuário ?", + "Unmute": "Desligar o silêncio" +} diff --git a/plugins/ContentFilter/languages/zh-tw.json b/plugins/ContentFilter/languages/zh-tw.json new file mode 100644 index 00000000..0995f3a0 --- /dev/null +++ b/plugins/ContentFilter/languages/zh-tw.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "屏蔽 %s 的所有內容?", + "Mute": "屏蔽", + "Unmute %s?": "對 %s 解除屏蔽?", + "Unmute": "解除屏蔽" +} diff --git a/plugins/ContentFilter/languages/zh.json b/plugins/ContentFilter/languages/zh.json new file mode 100644 index 00000000..bf63f107 --- /dev/null +++ b/plugins/ContentFilter/languages/zh.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "屏蔽 %s 的所有内容?", + "Mute": "屏蔽", + "Unmute %s?": "对 %s 解除屏蔽?", + "Unmute": "解除屏蔽" +} diff --git a/plugins/ContentFilter/media/blocklisted.html b/plugins/ContentFilter/media/blocklisted.html new file mode 100644 index 00000000..9a287b72 --- /dev/null +++ b/plugins/ContentFilter/media/blocklisted.html @@ -0,0 +1,107 @@ + + + + + +
Site blocked
This site is on your blocklist:
Too much image
on 2015-01-25 12:32:11
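A minimal sketch of the check that decides when this blocklisted page is served, mirroring UiRequestPlugin.actionWrapper above: both the plain site address and its "0x"-prefixed sha256 digest are looked up, so hashed blocklist entries match as well. The helper name isAddressBlocked is illustrative only; filter_storage stands for the plugin's ContentFilterStorage instance.

import hashlib

def isAddressBlocked(filter_storage, address):
    # Same logic as the wrapper: block on the raw address or on its hashed form
    address_sha256 = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
    return filter_storage.isSiteblocked(address) or filter_storage.isSiteblocked(address_sha256)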
+ + + + + + diff --git a/plugins/ContentFilter/media/js/ZeroFrame.js b/plugins/ContentFilter/media/js/ZeroFrame.js new file mode 100644 index 00000000..d6facdbf --- /dev/null +++ b/plugins/ContentFilter/media/js/ZeroFrame.js @@ -0,0 +1,119 @@ +// Version 1.0.0 - Initial release +// Version 1.1.0 (2017-08-02) - Added cmdp function that returns promise instead of using callback +// Version 1.2.0 (2017-08-02) - Added Ajax monkey patch to emulate XMLHttpRequest over ZeroFrame API + +const CMD_INNER_READY = 'innerReady' +const CMD_RESPONSE = 'response' +const CMD_WRAPPER_READY = 'wrapperReady' +const CMD_PING = 'ping' +const CMD_PONG = 'pong' +const CMD_WRAPPER_OPENED_WEBSOCKET = 'wrapperOpenedWebsocket' +const CMD_WRAPPER_CLOSE_WEBSOCKET = 'wrapperClosedWebsocket' + +class ZeroFrame { + constructor(url) { + this.url = url + this.waiting_cb = {} + this.wrapper_nonce = document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1") + this.connect() + this.next_message_id = 1 + this.init() + } + + init() { + return this + } + + connect() { + this.target = window.parent + window.addEventListener('message', e => this.onMessage(e), false) + this.cmd(CMD_INNER_READY) + } + + onMessage(e) { + let message = e.data + let cmd = message.cmd + if (cmd === CMD_RESPONSE) { + if (this.waiting_cb[message.to] !== undefined) { + this.waiting_cb[message.to](message.result) + } + else { + this.log("Websocket callback not found:", message) + } + } else if (cmd === CMD_WRAPPER_READY) { + this.cmd(CMD_INNER_READY) + } else if (cmd === CMD_PING) { + this.response(message.id, CMD_PONG) + } else if (cmd === CMD_WRAPPER_OPENED_WEBSOCKET) { + this.onOpenWebsocket() + } else if (cmd === CMD_WRAPPER_CLOSE_WEBSOCKET) { + this.onCloseWebsocket() + } else { + this.onRequest(cmd, message) + } + } + + onRequest(cmd, message) { + this.log("Unknown request", message) + } + + response(to, result) { + this.send({ + cmd: CMD_RESPONSE, + to: to, + result: result + }) + } + + cmd(cmd, params={}, cb=null) { + this.send({ + cmd: cmd, + params: params + }, cb) + } + + cmdp(cmd, params={}) { + return new Promise((resolve, reject) => { + this.cmd(cmd, params, (res) => { + if (res && res.error) { + reject(res.error) + } else { + resolve(res) + } + }) + }) + } + + send(message, cb=null) { + message.wrapper_nonce = this.wrapper_nonce + message.id = this.next_message_id + this.next_message_id++ + this.target.postMessage(message, '*') + if (cb) { + this.waiting_cb[message.id] = cb + } + } + + log(...args) { + console.log.apply(console, ['[ZeroFrame]'].concat(args)) + } + + onOpenWebsocket() { + this.log('Websocket open') + } + + onCloseWebsocket() { + this.log('Websocket close') + } + + monkeyPatchAjax() { + var page = this + XMLHttpRequest.prototype.realOpen = XMLHttpRequest.prototype.open + this.cmd("wrapperGetAjaxKey", [], (res) => { this.ajax_key = res }) + var newOpen = function (method, url, async) { + url += "?ajax_key=" + page.ajax_key + return this.realOpen(method, url, async) + } + XMLHttpRequest.prototype.open = newOpen + } +} diff --git a/plugins/Cors/CorsPlugin.py b/plugins/Cors/CorsPlugin.py new file mode 100644 index 00000000..af501462 --- /dev/null +++ b/plugins/Cors/CorsPlugin.py @@ -0,0 +1,104 @@ +import re +import html +import copy + +from Plugin import PluginManager +from Translate import Translate +if "_" not in locals(): + _ = Translate("plugins/Cors/languages/") + + +def getCorsPath(site, inner_path): + match = re.match("^cors-([A-Za-z0-9]{26,35})/(.*)", inner_path) + if not match: + raise Exception("Invalid cors 
path: %s" % inner_path) + cors_address = match.group(1) + cors_inner_path = match.group(2) + + if not "Cors:%s" % cors_address in site.settings["permissions"]: + raise Exception("This site has no permission to access site %s" % cors_address) + + return cors_address, cors_inner_path + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def hasSitePermission(self, address, cmd=None): + if super(UiWebsocketPlugin, self).hasSitePermission(address, cmd=cmd): + return True + + if not "Cors:%s" % address in self.site.settings["permissions"] or cmd not in ["fileGet", "fileList", "dirList", "fileRules", "optionalFileInfo", "fileQuery", "dbQuery", "userGetSettings", "siteInfo"]: + return False + else: + return True + + # Add cors support for file commands + def corsFuncWrapper(self, func_name, to, inner_path, *args, **kwargs): + if inner_path.startswith("cors-"): + cors_address, cors_inner_path = getCorsPath(self.site, inner_path) + + req_self = copy.copy(self) + req_self.site = self.server.sites.get(cors_address) # Change the site to the merged one + if not req_self.site: + return {"error": "No site found"} + + func = getattr(super(UiWebsocketPlugin, req_self), func_name) + back = func(to, cors_inner_path, *args, **kwargs) + return back + else: + func = getattr(super(UiWebsocketPlugin, self), func_name) + return func(to, inner_path, *args, **kwargs) + + def actionFileGet(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs) + + def actionFileList(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionFileList", to, inner_path, *args, **kwargs) + + def actionDirList(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionDirList", to, inner_path, *args, **kwargs) + + def actionFileRules(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs) + + def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs) + + def actionCorsPermission(self, to, address): + site = self.server.sites.get(address) + if site: + site_name = site.content_manager.contents.get("content.json", {}).get("title") + button_title = _["Grant"] + else: + site_name = address + button_title = _["Grant & Add"] + + if site and "Cors:" + address in self.permissions: + return "ignored" + + self.cmd( + "confirm", + [_["This site requests read permission to: %s"] % html.escape(site_name), button_title], + lambda res: self.cbCorsPermission(to, address) + ) + + def cbCorsPermission(self, to, address): + self.actionPermissionAdd(to, "Cors:" + address) + site = self.server.sites.get(address) + if not site: + self.server.site_manager.need(address) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Allow to load cross origin files using /cors-address/file.jpg + def parsePath(self, path): + path_parts = super(UiRequestPlugin, self).parsePath(path) + if "cors-" not in path: # Optimization + return path_parts + site = self.server.sites[path_parts["address"]] + try: + path_parts["address"], path_parts["inner_path"] = getCorsPath(site, path_parts["inner_path"]) + except: + return None + return path_parts diff --git a/plugins/Cors/__init__.py b/plugins/Cors/__init__.py new file mode 100644 index 00000000..bcaa502b --- /dev/null +++ b/plugins/Cors/__init__.py @@ -0,0 +1 @@ +from . 
import CorsPlugin \ No newline at end of file diff --git a/plugins/CryptMessage/CryptMessage.py b/plugins/CryptMessage/CryptMessage.py new file mode 100644 index 00000000..00c5d7c6 --- /dev/null +++ b/plugins/CryptMessage/CryptMessage.py @@ -0,0 +1,59 @@ +import hashlib +import base64 + +import lib.pybitcointools as btctools + +ecc_cache = {} + + +def eciesEncrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'): + import pyelliptic + pubkey_openssl = toOpensslPublickey(base64.b64decode(pubkey)) + curve, pubkey_x, pubkey_y, i = pyelliptic.ECC._decode_pubkey(pubkey_openssl) + if ephemcurve is None: + ephemcurve = curve + ephem = pyelliptic.ECC(curve=ephemcurve) + key = hashlib.sha512(ephem.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + key_e, key_m = key[:32], key[32:] + pubkey = ephem.get_pubkey() + iv = pyelliptic.OpenSSL.rand(pyelliptic.OpenSSL.get_cipher(ciphername).get_blocksize()) + ctx = pyelliptic.Cipher(key_e, iv, 1, ciphername) + ciphertext = iv + pubkey + ctx.ciphering(data) + mac = pyelliptic.hmac_sha256(key_m, ciphertext) + return key_e, ciphertext + mac + +def eciesDecrypt(encrypted_data, privatekey): + ecc_key = getEcc(privatekey) + return ecc_key.decrypt(base64.b64decode(encrypted_data)) + +def split(encrypted): + iv = encrypted[0:16] + ciphertext = encrypted[16 + 70:-32] + + return iv, ciphertext + + +def getEcc(privatekey=None): + import pyelliptic + global ecc_cache + if privatekey not in ecc_cache: + if privatekey: + publickey_bin = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin") + publickey_openssl = toOpensslPublickey(publickey_bin) + privatekey_openssl = toOpensslPrivatekey(privatekey) + ecc_cache[privatekey] = pyelliptic.ECC(curve='secp256k1', privkey=privatekey_openssl, pubkey=publickey_openssl) + else: + ecc_cache[None] = pyelliptic.ECC() + return ecc_cache[privatekey] + + +def toOpensslPrivatekey(privatekey): + privatekey_bin = btctools.encode_privkey(privatekey, "bin") + return b'\x02\xca\x00\x20' + privatekey_bin + + +def toOpensslPublickey(publickey): + publickey_bin = btctools.encode_pubkey(publickey, "bin") + publickey_bin = publickey_bin[1:] + publickey_openssl = b'\x02\xca\x00 ' + publickey_bin[:32] + b'\x00 ' + publickey_bin[32:] + return publickey_openssl diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py new file mode 100644 index 00000000..9ef2aab1 --- /dev/null +++ b/plugins/CryptMessage/CryptMessagePlugin.py @@ -0,0 +1,168 @@ +import base64 +import os + +from Plugin import PluginManager +from Crypt import CryptBitcoin, CryptHash +import lib.pybitcointools as btctools + +from . 
import CryptMessage + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def eciesDecrypt(self, encrypted, privatekey): + back = CryptMessage.getEcc(privatekey).decrypt(encrypted) + return back.decode("utf8") + + # - Actions - + + # Returns user's public key unique to site + # Return: Public key + def actionUserPublickey(self, to, index=0): + publickey = self.user.getEncryptPublickey(self.site.address, index) + self.response(to, publickey) + + # Encrypt a text using the publickey or user's sites unique publickey + # Return: Encrypted text using base64 encoding + def actionEciesEncrypt(self, to, text, publickey=0, return_aes_key=False): + if type(publickey) is int: # Encrypt using user's publickey + publickey = self.user.getEncryptPublickey(self.site.address, publickey) + aes_key, encrypted = CryptMessage.eciesEncrypt(text.encode("utf8"), publickey) + if return_aes_key: + self.response(to, [base64.b64encode(encrypted).decode("utf8"), base64.b64encode(aes_key).decode("utf8")]) + else: + self.response(to, base64.b64encode(encrypted).decode("utf8")) + + # Decrypt a text using privatekey or the user's site unique private key + # Return: Decrypted text or list of decrypted texts + def actionEciesDecrypt(self, to, param, privatekey=0): + if type(privatekey) is int: # Decrypt using user's privatekey + privatekey = self.user.getEncryptPrivatekey(self.site.address, privatekey) + + if type(param) == list: + encrypted_texts = param + else: + encrypted_texts = [param] + + texts = [] # Decoded texts + for encrypted_text in encrypted_texts: + try: + text = CryptMessage.eciesDecrypt(encrypted_text, privatekey).decode("utf8") + texts.append(text) + except Exception as err: + texts.append(None) + + if type(param) == list: + self.response(to, texts) + else: + self.response(to, texts[0]) + + # Encrypt a text using AES + # Return: Iv, AES key, Encrypted text + def actionAesEncrypt(self, to, text, key=None, iv=None): + import pyelliptic + + if key: + key = base64.b64decode(key) + else: + key = os.urandom(32) + + if iv: # Generate new AES key if not definied + iv = base64.b64decode(iv) + else: + iv = pyelliptic.Cipher.gen_IV('aes-256-cbc') + + if text: + encrypted = pyelliptic.Cipher(key, iv, 1, ciphername='aes-256-cbc').ciphering(text.encode("utf8")) + else: + encrypted = b"" + + res = [base64.b64encode(item).decode("utf8") for item in [key, iv, encrypted]] + self.response(to, res) + + # Decrypt a text using AES + # Return: Decrypted text + def actionAesDecrypt(self, to, *args): + import pyelliptic + + if len(args) == 3: # Single decrypt + encrypted_texts = [(args[0], args[1])] + keys = [args[2]] + else: # Batch decrypt + encrypted_texts, keys = args + + texts = [] # Decoded texts + for iv, encrypted_text in encrypted_texts: + encrypted_text = base64.b64decode(encrypted_text) + iv = base64.b64decode(iv) + text = None + for key in keys: + ctx = pyelliptic.Cipher(base64.b64decode(key), iv, 0, ciphername='aes-256-cbc') + try: + decrypted = ctx.ciphering(encrypted_text) + if decrypted and decrypted.decode("utf8"): # Valid text decoded + text = decrypted.decode("utf8") + except Exception as err: + pass + texts.append(text) + + if len(args) == 3: + self.response(to, texts[0]) + else: + self.response(to, texts) + + # Sign data using ECDSA + # Return: Signature + def actionEcdsaSign(self, to, data, privatekey=None): + if privatekey is None: # Sign using user's privatekey + privatekey = self.user.getAuthPrivatekey(self.site.address) + + self.response(to, CryptBitcoin.sign(data, privatekey)) 
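    # Illustrative round trip (annotation only, not part of CryptMessagePlugin.py): the
    # ECIES actions above delegate to the module-level helpers in CryptMessage.py, which
    # can be exercised directly, as Test/TestCrypt.py does below. "publickey" and
    # "privatekey" are assumed to be a matching secp256k1 key pair (base64-encoded
    # compressed public key, WIF private key).
    #
    #     aes_key, encrypted = CryptMessage.eciesEncrypt(b"hello", publickey)
    #     assert len(aes_key) == 32  # AES key derived from the ECDH shared secret
    #     assert CryptMessage.getEcc(privatekey).decrypt(encrypted) == b"hello"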
+ + # Verify data using ECDSA (address is either a address or array of addresses) + # Return: bool + def actionEcdsaVerify(self, to, data, address, signature): + self.response(to, CryptBitcoin.verify(data, address, signature)) + + # Gets the publickey of a given privatekey + def actionEccPrivToPub(self, to, privatekey): + self.response(to, btctools.privtopub(privatekey)) + + # Gets the address of a given publickey + def actionEccPubToAddr(self, to, publickey): + address = btctools.pubtoaddr(btctools.decode_pubkey(publickey)) + self.response(to, address) + + +@PluginManager.registerTo("User") +class UserPlugin(object): + def getEncryptPrivatekey(self, address, param_index=0): + assert param_index >= 0 and param_index <= 1000 + site_data = self.getSiteData(address) + + if site_data.get("cert"): # Different privatekey for different cert provider + index = param_index + self.getAddressAuthIndex(site_data["cert"]) + else: + index = param_index + + if "encrypt_privatekey_%s" % index not in site_data: + address_index = self.getAddressAuthIndex(address) + crypt_index = address_index + 1000 + index + site_data["encrypt_privatekey_%s" % index] = CryptBitcoin.hdPrivatekey(self.master_seed, crypt_index) + self.log.debug("New encrypt privatekey generated for %s:%s" % (address, index)) + return site_data["encrypt_privatekey_%s" % index] + + def getEncryptPublickey(self, address, param_index=0): + assert param_index >= 0 and param_index <= 1000 + site_data = self.getSiteData(address) + + if site_data.get("cert"): # Different privatekey for different cert provider + index = param_index + self.getAddressAuthIndex(site_data["cert"]) + else: + index = param_index + + if "encrypt_publickey_%s" % index not in site_data: + privatekey = self.getEncryptPrivatekey(address, param_index) + publickey = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin_compressed") + site_data["encrypt_publickey_%s" % index] = base64.b64encode(publickey).decode("utf8") + return site_data["encrypt_publickey_%s" % index] diff --git a/plugins/CryptMessage/Test/TestCrypt.py b/plugins/CryptMessage/Test/TestCrypt.py new file mode 100644 index 00000000..681c4790 --- /dev/null +++ b/plugins/CryptMessage/Test/TestCrypt.py @@ -0,0 +1,125 @@ +import pytest +import base64 +from CryptMessage import CryptMessage + + +@pytest.mark.usefixtures("resetSettings") +class TestCrypt: + publickey = "A3HatibU4S6eZfIQhVs2u7GLN5G9wXa9WwlkyYIfwYaj" + privatekey = "5JBiKFYBm94EUdbxtnuLi6cvNcPzcKymCUHBDf2B6aq19vvG3rL" + utf8_text = '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9' + ecies_encrypted_text = "R5J1RFIDOzE5bnWopvccmALKACCk/CRcd/KSE9OgExJKASyMbZ57JVSUenL2TpABMmcT+wAgr2UrOqClxpOWvIUwvwwupXnMbRTzthhIJJrTRW3sCJVaYlGEMn9DAcvbflgEkQX/MVVdLV3tWKySs1Vk8sJC/y+4pGYCrZz7vwDNEEERaqU=" + + @pytest.mark.parametrize("text", [b"hello", '\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9'.encode("utf8")]) + @pytest.mark.parametrize("text_repeat", [1, 10, 128, 1024]) + def testEncryptEcies(self, text, text_repeat): + text_repeated = text * text_repeat + aes_key, encrypted = CryptMessage.eciesEncrypt(text_repeated, self.publickey) + assert len(aes_key) == 32 + # assert len(encrypted) == 134 + int(len(text) / 16) * 16 # Not always true + + ecc = CryptMessage.getEcc(self.privatekey) + assert ecc.decrypt(encrypted) == text_repeated + + def testDecryptEcies(self, user): + encrypted = base64.b64decode(self.ecies_encrypted_text) + ecc = CryptMessage.getEcc(self.privatekey) + assert ecc.decrypt(encrypted) == b"hello" + + def testPublickey(self, ui_websocket): + pub = 
ui_websocket.testAction("UserPublickey", 0) + assert len(pub) == 44 # Compressed, b64 encoded publickey + + # Different pubkey for specificed index + assert ui_websocket.testAction("UserPublickey", 1) != ui_websocket.testAction("UserPublickey", 0) + + # Same publickey for same index + assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2) + + # Different publickey for different cert + site_data = ui_websocket.user.getSiteData(ui_websocket.site.address) + site_data["cert"] = None + pub1 = ui_websocket.testAction("UserPublickey", 0) + + site_data = ui_websocket.user.getSiteData(ui_websocket.site.address) + site_data["cert"] = "zeroid.bit" + pub2 = ui_websocket.testAction("UserPublickey", 0) + assert pub1 != pub2 + + def testEcies(self, ui_websocket): + ui_websocket.actionUserPublickey(0, 0) + pub = ui_websocket.ws.getResult() + + ui_websocket.actionEciesEncrypt(0, "hello", pub) + encrypted = ui_websocket.ws.getResult() + assert len(encrypted) == 180 + + # Don't allow decrypt using other privatekey index + ui_websocket.actionEciesDecrypt(0, encrypted, 123) + decrypted = ui_websocket.ws.getResult() + assert decrypted != "hello" + + # Decrypt using correct privatekey + ui_websocket.actionEciesDecrypt(0, encrypted) + decrypted = ui_websocket.ws.getResult() + assert decrypted == "hello" + + # Decrypt batch + ui_websocket.actionEciesDecrypt(0, [encrypted, "baad", encrypted]) + decrypted = ui_websocket.ws.getResult() + assert decrypted == ["hello", None, "hello"] + + def testEciesUtf8(self, ui_websocket): + # Utf8 test + ui_websocket.actionEciesEncrypt(0, self.utf8_text) + encrypted = ui_websocket.ws.getResult() + + ui_websocket.actionEciesDecrypt(0, encrypted) + assert ui_websocket.ws.getResult() == self.utf8_text + + def testEciesAes(self, ui_websocket): + ui_websocket.actionEciesEncrypt(0, "hello", return_aes_key=True) + ecies_encrypted, aes_key = ui_websocket.ws.getResult() + + # Decrypt using Ecies + ui_websocket.actionEciesDecrypt(0, ecies_encrypted) + assert ui_websocket.ws.getResult() == "hello" + + # Decrypt using AES + aes_iv, aes_encrypted = CryptMessage.split(base64.b64decode(ecies_encrypted)) + + ui_websocket.actionAesDecrypt(0, base64.b64encode(aes_iv), base64.b64encode(aes_encrypted), aes_key) + assert ui_websocket.ws.getResult() == "hello" + + def testAes(self, ui_websocket): + ui_websocket.actionAesEncrypt(0, "hello") + key, iv, encrypted = ui_websocket.ws.getResult() + + assert len(key) == 44 + assert len(iv) == 24 + assert len(encrypted) == 24 + + # Single decrypt + ui_websocket.actionAesDecrypt(0, iv, encrypted, key) + assert ui_websocket.ws.getResult() == "hello" + + # Batch decrypt + ui_websocket.actionAesEncrypt(0, "hello") + key2, iv2, encrypted2 = ui_websocket.ws.getResult() + + assert [key, iv, encrypted] != [key2, iv2, encrypted2] + + # 2 correct key + ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key]) + assert ui_websocket.ws.getResult() == ["hello", "hello", None, None] + + # 3 key + ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key, key2]) + assert ui_websocket.ws.getResult() == ["hello", "hello", None, "hello"] + + def testAesUtf8(self, ui_websocket): + ui_websocket.actionAesEncrypt(0, self.utf8_text) + key, iv, encrypted = ui_websocket.ws.getResult() + + ui_websocket.actionAesDecrypt(0, iv, encrypted, key) + assert ui_websocket.ws.getResult() == self.utf8_text diff --git a/plugins/CryptMessage/Test/conftest.py 
b/plugins/CryptMessage/Test/conftest.py new file mode 100644 index 00000000..8c1df5b2 --- /dev/null +++ b/plugins/CryptMessage/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/CryptMessage/Test/pytest.ini b/plugins/CryptMessage/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/CryptMessage/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/CryptMessage/__init__.py b/plugins/CryptMessage/__init__.py new file mode 100644 index 00000000..6aeb4e52 --- /dev/null +++ b/plugins/CryptMessage/__init__.py @@ -0,0 +1 @@ +from . import CryptMessagePlugin \ No newline at end of file diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py new file mode 100644 index 00000000..840961b7 --- /dev/null +++ b/plugins/FilePack/FilePackPlugin.py @@ -0,0 +1,196 @@ +import os +import re + +import gevent + +from Plugin import PluginManager +from Config import config +from Debug import Debug + +# Keep archive open for faster reponse times for large sites +archive_cache = {} + + +def closeArchive(archive_path): + if archive_path in archive_cache: + del archive_cache[archive_path] + + +def openArchive(archive_path, file_obj=None): + if archive_path not in archive_cache: + if archive_path.endswith("tar.gz"): + import tarfile + archive_cache[archive_path] = tarfile.open(archive_path, fileobj=file_obj, mode="r:gz") + elif archive_path.endswith("tar.bz2"): + import tarfile + archive_cache[archive_path] = tarfile.open(archive_path, fileobj=file_obj, mode="r:bz2") + else: + import zipfile + archive_cache[archive_path] = zipfile.ZipFile(file_obj or archive_path) + gevent.spawn_later(5, lambda: closeArchive(archive_path)) # Close after 5 sec + + archive = archive_cache[archive_path] + return archive + + +def openArchiveFile(archive_path, path_within, file_obj=None): + archive = openArchive(archive_path, file_obj=file_obj) + if archive_path.endswith(".zip"): + return archive.open(path_within) + else: + return archive.extractfile(path_within) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def actionSiteMedia(self, path, **kwargs): + if ".zip/" in path or ".tar.gz/" in path: + file_obj = None + path_parts = self.parsePath(path) + file_path = "%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"]) + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path) + archive_path, path_within = match.groups() + if archive_path not in archive_cache: + site = self.server.site_manager.get(path_parts["address"]) + if not site: + return self.actionSiteAddPrompt(path) + archive_inner_path = site.storage.getInnerPath(archive_path) + if not os.path.isfile(archive_path): + # Wait until file downloads + result = site.needFile(archive_inner_path, priority=10) + # Send virutal file path download finished event to remove loading screen + site.updateWebsocket(file_done=archive_inner_path) + if not result: + return self.error404(archive_inner_path) + file_obj = site.storage.openBigfile(archive_inner_path) + if file_obj == False: + file_obj = None + + header_allow_ajax = False + if self.get.get("ajax_key"): + requester_site = self.server.site_manager.get(path_parts["request_address"]) + if self.get["ajax_key"] == requester_site.settings["ajax_key"]: + header_allow_ajax = True + else: + return self.error403("Invalid 
ajax_key") + + try: + file = openArchiveFile(archive_path, path_within, file_obj=file_obj) + content_type = self.getContentType(file_path) + self.sendHeader(200, content_type=content_type, noscript=kwargs.get("header_noscript", False), allow_ajax=header_allow_ajax) + return self.streamFile(file) + except Exception as err: + self.log.debug("Error opening archive file: %s" % Debug.formatException(err)) + return self.error404(path) + + return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs) + + def streamFile(self, file): + for i in range(100): # Read max 6MB + try: + block = file.read(60 * 1024) + if block: + yield block + else: + raise StopIteration + except StopIteration: + file.close() + break + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + def isFile(self, inner_path): + if ".zip/" in inner_path or ".tar.gz/" in inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", inner_path) + archive_inner_path, path_within = match.groups() + return super(SiteStoragePlugin, self).isFile(archive_inner_path) + else: + return super(SiteStoragePlugin, self).isFile(inner_path) + + def openArchive(self, inner_path): + archive_path = self.getPath(inner_path) + file_obj = None + if archive_path not in archive_cache: + if not os.path.isfile(archive_path): + result = self.site.needFile(inner_path, priority=10) + self.site.updateWebsocket(file_done=inner_path) + if not result: + raise Exception("Unable to download file") + file_obj = self.site.storage.openBigfile(inner_path) + if file_obj == False: + file_obj = None + + try: + archive = openArchive(archive_path, file_obj=file_obj) + except Exception as err: + raise Exception("Unable to download file: %s" % Debug.formatException(err)) + + return archive + + def walk(self, inner_path, *args, **kwags): + if ".zip" in inner_path or ".tar.gz" in inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) + archive_inner_path, path_within = match.groups() + archive = self.openArchive(archive_inner_path) + path_within = path_within.lstrip("/") + + if archive_inner_path.endswith(".zip"): + namelist = [name for name in archive.namelist() if not name.endswith("/")] + else: + namelist = [item.name for item in archive.getmembers() if not item.isdir()] + + namelist_relative = [] + for name in namelist: + if not name.startswith(path_within): + continue + name_relative = name.replace(path_within, "", 1).rstrip("/") + namelist_relative.append(name_relative) + + return namelist_relative + + else: + return super(SiteStoragePlugin, self).walk(inner_path, *args, **kwags) + + def list(self, inner_path, *args, **kwags): + if ".zip" in inner_path or ".tar.gz" in inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) + archive_inner_path, path_within = match.groups() + archive = self.openArchive(archive_inner_path) + path_within = path_within.lstrip("/") + + if archive_inner_path.endswith(".zip"): + namelist = [name for name in archive.namelist()] + else: + namelist = [item.name for item in archive.getmembers()] + + namelist_relative = [] + for name in namelist: + if not name.startswith(path_within): + continue + name_relative = name.replace(path_within, "", 1).rstrip("/") + + if "/" in name_relative: # File is in sub-directory + continue + + namelist_relative.append(name_relative) + return namelist_relative + + else: + return super(SiteStoragePlugin, self).list(inner_path, *args, **kwags) + + def read(self, inner_path, mode="rb"): + if ".zip/" in inner_path or ".tar.gz/" in 
inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) + archive_inner_path, path_within = match.groups() + archive = self.openArchive(archive_inner_path) + path_within = path_within.lstrip("/") + + if archive_inner_path.endswith(".zip"): + return archive.open(path_within).read() + else: + return archive.extractfile(path_within).read() + + else: + return super(SiteStoragePlugin, self).read(inner_path, mode) + diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py new file mode 100644 index 00000000..660a0920 --- /dev/null +++ b/plugins/FilePack/__init__.py @@ -0,0 +1 @@ +from . import FilePackPlugin \ No newline at end of file diff --git a/plugins/MergerSite/MergerSitePlugin.py b/plugins/MergerSite/MergerSitePlugin.py new file mode 100644 index 00000000..36c1dbae --- /dev/null +++ b/plugins/MergerSite/MergerSitePlugin.py @@ -0,0 +1,384 @@ +import re +import time +import copy + +from Plugin import PluginManager +from Translate import Translate +from util import RateLimit +from util import helper +from Debug import Debug +try: + import OptionalManager.UiWebsocketPlugin # To make optioanlFileInfo merger sites compatible +except Exception: + pass + +if "merger_db" not in locals().keys(): # To keep merger_sites between module reloads + merger_db = {} # Sites that allowed to list other sites {address: [type1, type2...]} + merged_db = {} # Sites that allowed to be merged to other sites {address: type, ...} + merged_to_merger = {} # {address: [site1, site2, ...]} cache + site_manager = None # Site manager for merger sites + +if "_" not in locals(): + _ = Translate("plugins/MergerSite/languages/") + + +# Check if the site has permission to this merger site +def checkMergerPath(address, inner_path): + merged_match = re.match("^merged-(.*?)/([A-Za-z0-9]{26,35})/", inner_path) + if merged_match: + merger_type = merged_match.group(1) + # Check if merged site is allowed to include other sites + if merger_type in merger_db.get(address, []): + # Check if included site allows to include + merged_address = merged_match.group(2) + if merged_db.get(merged_address) == merger_type: + inner_path = re.sub("^merged-(.*?)/([A-Za-z0-9]{26,35})/", "", inner_path) + return merged_address, inner_path + else: + raise Exception( + "Merger site (%s) does not have permission for merged site: %s (%s)" % + (merger_type, merged_address, merged_db.get(merged_address)) + ) + else: + raise Exception("No merger (%s) permission to load:
%s (%s not in %s)" % ( + address, inner_path, merger_type, merger_db.get(address, [])) + ) + else: + raise Exception("Invalid merger path: %s" % inner_path) + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + # Download new site + def actionMergerSiteAdd(self, to, addresses): + if type(addresses) != list: + # Single site add + addresses = [addresses] + # Check if the site has merger permission + merger_types = merger_db.get(self.site.address) + if not merger_types: + return self.response(to, {"error": "Not a merger site"}) + + if RateLimit.isAllowed(self.site.address + "-MergerSiteAdd", 10) and len(addresses) == 1: + # Without confirmation if only one site address and not called in last 10 sec + self.cbMergerSiteAdd(to, addresses) + else: + self.cmd( + "confirm", + [_["Add %s new site?"] % len(addresses), "Add"], + lambda res: self.cbMergerSiteAdd(to, addresses) + ) + self.response(to, "ok") + + # Callback of adding new site confirmation + def cbMergerSiteAdd(self, to, addresses): + added = 0 + for address in addresses: + added += 1 + site_manager.need(address) + if added: + self.cmd("notification", ["done", _["Added %s new site"] % added, 5000]) + RateLimit.called(self.site.address + "-MergerSiteAdd") + site_manager.updateMergerSites() + + # Delete a merged site + def actionMergerSiteDelete(self, to, address): + site = self.server.sites.get(address) + if not site: + return self.response(to, {"error": "No site found: %s" % address}) + + merger_types = merger_db.get(self.site.address) + if not merger_types: + return self.response(to, {"error": "Not a merger site"}) + if merged_db.get(address) not in merger_types: + return self.response(to, {"error": "Merged type (%s) not in %s" % (merged_db.get(address), merger_types)}) + + self.cmd("notification", ["done", _["Site deleted: %s"] % address, 5000]) + self.response(to, "ok") + + # Lists merged sites + def actionMergerSiteList(self, to, query_site_info=False): + merger_types = merger_db.get(self.site.address) + ret = {} + if not merger_types: + return self.response(to, {"error": "Not a merger site"}) + for address, merged_type in merged_db.items(): + if merged_type not in merger_types: + continue # Site not for us + if query_site_info: + site = self.server.sites.get(address) + ret[address] = self.formatSiteInfo(site, create_user=False) + else: + ret[address] = merged_type + self.response(to, ret) + + def hasSitePermission(self, address, *args, **kwargs): + if super(UiWebsocketPlugin, self).hasSitePermission(address, *args, **kwargs): + return True + else: + if self.site.address in [merger_site.address for merger_site in merged_to_merger.get(address, [])]: + return True + else: + return False + + # Add support merger sites for file commands + def mergerFuncWrapper(self, func_name, to, inner_path, *args, **kwargs): + if inner_path.startswith("merged-"): + merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) + + # Set the same cert for merged site + merger_cert = self.user.getSiteData(self.site.address).get("cert") + if merger_cert and self.user.getSiteData(merged_address).get("cert") != merger_cert: + self.user.setCert(merged_address, merger_cert) + + req_self = copy.copy(self) + req_self.site = self.server.sites.get(merged_address) # Change the site to the merged one + + func = getattr(super(UiWebsocketPlugin, req_self), func_name) + return func(to, merged_inner_path, *args, **kwargs) + else: + func = getattr(super(UiWebsocketPlugin, self), func_name) + return func(to, inner_path, 
*args, **kwargs) + + def actionFileList(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileList", to, inner_path, *args, **kwargs) + + def actionDirList(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionDirList", to, inner_path, *args, **kwargs) + + def actionFileGet(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs) + + def actionFileWrite(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileWrite", to, inner_path, *args, **kwargs) + + def actionFileDelete(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileDelete", to, inner_path, *args, **kwargs) + + def actionFileRules(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs) + + def actionFileNeed(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileNeed", to, inner_path, *args, **kwargs) + + def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs) + + def actionOptionalFileDelete(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionOptionalFileDelete", to, inner_path, *args, **kwargs) + + def actionBigfileUploadInit(self, to, inner_path, *args, **kwargs): + back = self.mergerFuncWrapper("actionBigfileUploadInit", to, inner_path, *args, **kwargs) + if inner_path.startswith("merged-"): + merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) + back["inner_path"] = "merged-%s/%s/%s" % (merged_db[merged_address], merged_address, back["inner_path"]) + return back + + # Add support merger sites for file commands with privatekey parameter + def mergerFuncWrapperWithPrivatekey(self, func_name, to, privatekey, inner_path, *args, **kwargs): + func = getattr(super(UiWebsocketPlugin, self), func_name) + if inner_path.startswith("merged-"): + merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) + merged_site = self.server.sites.get(merged_address) + + # Set the same cert for merged site + merger_cert = self.user.getSiteData(self.site.address).get("cert") + if merger_cert: + self.user.setCert(merged_address, merger_cert) + + site_before = self.site # Save to be able to change it back after we ran the command + self.site = merged_site # Change the site to the merged one + try: + back = func(to, privatekey, merged_inner_path, *args, **kwargs) + finally: + self.site = site_before # Change back to original site + return back + else: + return func(to, privatekey, inner_path, *args, **kwargs) + + def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs): + return self.mergerFuncWrapperWithPrivatekey("actionSiteSign", to, privatekey, inner_path, *args, **kwargs) + + def actionSitePublish(self, to, privatekey=None, inner_path="content.json", *args, **kwargs): + return self.mergerFuncWrapperWithPrivatekey("actionSitePublish", to, privatekey, inner_path, *args, **kwargs) + + def actionPermissionAdd(self, to, permission): + super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission) + if permission.startswith("Merger"): + self.site.storage.rebuildDb() + + def actionPermissionDetails(self, to, permission): + if not permission.startswith("Merger"): + return super(UiWebsocketPlugin, self).actionPermissionDetails(to, permission) + + merger_type = 
permission.replace("Merger:", "") + if not re.match("^[A-Za-z0-9-]+$", merger_type): + raise Exception("Invalid merger_type: %s" % merger_type) + merged_sites = [] + for address, merged_type in merged_db.items(): + if merged_type != merger_type: + continue + site = self.server.sites.get(address) + try: + merged_sites.append(site.content_manager.contents.get("content.json").get("title", address)) + except Exception as err: + merged_sites.append(address) + + details = _["Read and write permissions to sites with merged type of %s "] % merger_type + details += _["(%s sites)"] % len(merged_sites) + details += "
%s
" % ", ".join(merged_sites) + self.response(to, details) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Allow to load merged site files using /merged-ZeroMe/address/file.jpg + def parsePath(self, path): + path_parts = super(UiRequestPlugin, self).parsePath(path) + if "merged-" not in path: # Optimization + return path_parts + path_parts["address"], path_parts["inner_path"] = checkMergerPath(path_parts["address"], path_parts["inner_path"]) + return path_parts + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + # Also rebuild from merged sites + def getDbFiles(self): + merger_types = merger_db.get(self.site.address) + + # First return the site's own db files + for item in super(SiteStoragePlugin, self).getDbFiles(): + yield item + + # Not a merger site, that's all + if not merger_types: + return + + merged_sites = [ + site_manager.sites[address] + for address, merged_type in merged_db.items() + if merged_type in merger_types + ] + found = 0 + for merged_site in merged_sites: + self.log.debug("Loading merged site: %s" % merged_site) + merged_type = merged_db[merged_site.address] + for content_inner_path, content in merged_site.content_manager.contents.items(): + # content.json file itself + if merged_site.storage.isFile(content_inner_path): # Missing content.json file + merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path) + yield merged_inner_path, merged_site.storage.getPath(content_inner_path) + else: + merged_site.log.error("[MISSING] %s" % content_inner_path) + # Data files in content.json + content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site + for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): + if not file_relative_path.endswith(".json"): + continue # We only interesed in json files + file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir + file_inner_path = file_inner_path.strip("/") # Strip leading / + if merged_site.storage.isFile(file_inner_path): + merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path) + yield merged_inner_path, merged_site.storage.getPath(file_inner_path) + else: + merged_site.log.error("[MISSING] %s" % file_inner_path) + found += 1 + if found % 100 == 0: + time.sleep(0.001) # Context switch to avoid UI block + + # Also notice merger sites on a merged site file change + def onUpdated(self, inner_path, file=None): + super(SiteStoragePlugin, self).onUpdated(inner_path, file) + + merged_type = merged_db.get(self.site.address) + + for merger_site in merged_to_merger.get(self.site.address, []): + if merger_site.address == self.site.address: # Avoid infinite loop + continue + virtual_path = "merged-%s/%s/%s" % (merged_type, self.site.address, inner_path) + if inner_path.endswith(".json"): + if file is not None: + merger_site.storage.onUpdated(virtual_path, file=file) + else: + merger_site.storage.onUpdated(virtual_path, file=self.open(inner_path)) + else: + merger_site.storage.onUpdated(virtual_path) + + +@PluginManager.registerTo("Site") +class SitePlugin(object): + def fileDone(self, inner_path): + super(SitePlugin, self).fileDone(inner_path) + + for merger_site in merged_to_merger.get(self.address, []): + if merger_site.address == self.address: + continue + for ws in merger_site.websockets: + ws.event("siteChanged", self, {"event": ["file_done", inner_path]}) + + def fileFailed(self, 
inner_path): + super(SitePlugin, self).fileFailed(inner_path) + + for merger_site in merged_to_merger.get(self.address, []): + if merger_site.address == self.address: + continue + for ws in merger_site.websockets: + ws.event("siteChanged", self, {"event": ["file_failed", inner_path]}) + + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + # Update merger site for site types + def updateMergerSites(self): + global merger_db, merged_db, merged_to_merger, site_manager + s = time.time() + merger_db = {} + merged_db = {} + merged_to_merger = {} + site_manager = self + if not self.sites: + return + for site in self.sites.values(): + # Update merged sites + try: + merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type") + except Exception as err: + self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err))) + continue + if merged_type: + merged_db[site.address] = merged_type + + # Update merger sites + for permission in site.settings["permissions"]: + if not permission.startswith("Merger:"): + continue + if merged_type: + self.log.error( + "Removing permission %s from %s: Merger and merged at the same time." % + (permission, site.address) + ) + site.settings["permissions"].remove(permission) + continue + merger_type = permission.replace("Merger:", "") + if site.address not in merger_db: + merger_db[site.address] = [] + merger_db[site.address].append(merger_type) + site_manager.sites[site.address] = site + + # Update merged to merger + if merged_type: + for merger_site in self.sites.values(): + if "Merger:" + merged_type in merger_site.settings["permissions"]: + if site.address not in merged_to_merger: + merged_to_merger[site.address] = [] + merged_to_merger[site.address].append(merger_site) + self.log.debug("Updated merger sites in %.3fs" % (time.time() - s)) + + def load(self, *args, **kwags): + super(SiteManagerPlugin, self).load(*args, **kwags) + self.updateMergerSites() + + def save(self, *args, **kwags): + super(SiteManagerPlugin, self).save(*args, **kwags) + self.updateMergerSites() diff --git a/plugins/MergerSite/__init__.py b/plugins/MergerSite/__init__.py new file mode 100644 index 00000000..2cf54611 --- /dev/null +++ b/plugins/MergerSite/__init__.py @@ -0,0 +1 @@ +from . 
import MergerSitePlugin \ No newline at end of file diff --git a/plugins/MergerSite/languages/es.json b/plugins/MergerSite/languages/es.json new file mode 100644 index 00000000..d554c3a9 --- /dev/null +++ b/plugins/MergerSite/languages/es.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "¿Agregar %s nuevo sitio?", + "Added %s new site": "Sitio %s agregado", + "Site deleted: %s": "Sitio removido: %s" +} diff --git a/plugins/MergerSite/languages/fr.json b/plugins/MergerSite/languages/fr.json new file mode 100644 index 00000000..9d59fde9 --- /dev/null +++ b/plugins/MergerSite/languages/fr.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Ajouter le site %s ?", + "Added %s new site": "Site %s ajouté", + "Site deleted: %s": "Site %s supprimé" +} diff --git a/plugins/MergerSite/languages/hu.json b/plugins/MergerSite/languages/hu.json new file mode 100644 index 00000000..8e377aaa --- /dev/null +++ b/plugins/MergerSite/languages/hu.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Új oldal hozzáadása: %s?", + "Added %s new site": "Új oldal hozzáadva: %s", + "Site deleted: %s": "Oldal törölve: %s" +} diff --git a/plugins/MergerSite/languages/it.json b/plugins/MergerSite/languages/it.json new file mode 100644 index 00000000..d56c9817 --- /dev/null +++ b/plugins/MergerSite/languages/it.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Aggiungere %s nuovo sito ?", + "Added %s new site": "Sito %s aggiunto", + "Site deleted: %s": "Sito %s eliminato" +} diff --git a/plugins/MergerSite/languages/pt-br.json b/plugins/MergerSite/languages/pt-br.json new file mode 100644 index 00000000..cdc298cb --- /dev/null +++ b/plugins/MergerSite/languages/pt-br.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Adicionar %s novo site?", + "Added %s new site": "Site %s adicionado", + "Site deleted: %s": "Site removido: %s" +} diff --git a/plugins/MergerSite/languages/tr.json b/plugins/MergerSite/languages/tr.json new file mode 100644 index 00000000..5afb3942 --- /dev/null +++ b/plugins/MergerSite/languages/tr.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "%s sitesi eklensin mi?", + "Added %s new site": "%s sitesi eklendi", + "Site deleted: %s": "%s sitesi silindi" +} diff --git a/plugins/MergerSite/languages/zh-tw.json b/plugins/MergerSite/languages/zh-tw.json new file mode 100644 index 00000000..a0684e63 --- /dev/null +++ b/plugins/MergerSite/languages/zh-tw.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "添加新網站: %s?", + "Added %s new site": "已添加到新網站:%s", + "Site deleted: %s": "網站已刪除:%s" +} diff --git a/plugins/MergerSite/languages/zh.json b/plugins/MergerSite/languages/zh.json new file mode 100644 index 00000000..127044e6 --- /dev/null +++ b/plugins/MergerSite/languages/zh.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "添加新站点: %s?", + "Added %s new site": "已添加到新站点:%s", + "Site deleted: %s": "站点已删除:%s" +} diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py new file mode 100644 index 00000000..461401ac --- /dev/null +++ b/plugins/Newsfeed/NewsfeedPlugin.py @@ -0,0 +1,188 @@ +import time +import re + +from Plugin import PluginManager +from Db.DbQuery import DbQuery +from Debug import Debug +from util import helper + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def formatSiteInfo(self, site, create_user=True): + site_info = super(UiWebsocketPlugin, self).formatSiteInfo(site, create_user=create_user) + feed_following = self.user.sites.get(site.address, {}).get("follow", None) + if feed_following == None: + site_info["feed_follow_num"] = None + else: + 
site_info["feed_follow_num"] = len(feed_following) + return site_info + + def actionFeedFollow(self, to, feeds): + self.user.setFeedFollow(self.site.address, feeds) + self.user.save() + self.response(to, "ok") + + def actionFeedListFollow(self, to): + feeds = self.user.sites[self.site.address].get("follow", {}) + self.response(to, feeds) + + def actionFeedQuery(self, to, limit=10, day_limit=3): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "FeedQuery not allowed") + + from Site import SiteManager + rows = [] + stats = [] + + total_s = time.time() + num_sites = 0 + + for address, site_data in list(self.user.sites.items()): + feeds = site_data.get("follow") + if not feeds: + continue + if type(feeds) is not dict: + self.log.debug("Invalid feed for site %s" % address) + continue + num_sites += 1 + for name, query_set in feeds.items(): + site = SiteManager.site_manager.get(address) + if not site or not site.storage.has_db: + continue + + s = time.time() + try: + query_raw, params = query_set + query_parts = re.split(r"UNION(?:\s+ALL|)", query_raw) + for i, query_part in enumerate(query_parts): + db_query = DbQuery(query_part) + if day_limit: + where = " WHERE %s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit) + if "WHERE" in query_part: + query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where+" AND (\\1)", query_part) + else: + query_part += where + query_parts[i] = query_part + query = " UNION ".join(query_parts) + + if ":params" in query: + query_params = map(helper.sqlquote, params) + query = query.replace(":params", ",".join(query_params)) + + res = site.storage.query(query + " ORDER BY date_added DESC LIMIT %s" % limit) + + except Exception as err: # Log error + self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err))) + stats.append({"site": site.address, "feed_name": name, "error": str(err)}) + continue + + for row in res: + row = dict(row) + if not isinstance(row["date_added"], (int, float, complex)): + self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"])) + continue + if row["date_added"] > 1000000000000: # Formatted as millseconds + row["date_added"] = row["date_added"] / 1000 + if "date_added" not in row or row["date_added"] > time.time() + 120: + self.log.debug("Newsfeed item from the future from from site %s" % address) + continue # Feed item is in the future, skip it + row["site"] = address + row["feed_name"] = name + rows.append(row) + stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)}) + time.sleep(0.001) + return self.response(to, {"rows": rows, "stats": stats, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3)}) + + def parseSearch(self, search): + parts = re.split("(site|type):", search) + if len(parts) > 1: # Found filter + search_text = parts[0] + parts = [part.strip() for part in parts] + filters = dict(zip(parts[1::2], parts[2::2])) + else: + search_text = search + filters = {} + return [search_text, filters] + + def actionFeedSearch(self, to, search, limit=30, day_limit=30): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "FeedSearch not allowed") + + from Site import SiteManager + rows = [] + stats = [] + num_sites = 0 + total_s = time.time() + + search_text, filters = self.parseSearch(search) + + for address, site in SiteManager.site_manager.list().items(): + if not site.storage.has_db: + continue + + if "site" in 
filters: + if filters["site"].lower() not in [site.address, site.content_manager.contents["content.json"].get("title").lower()]: + continue + + if site.storage.db: # Database loaded + feeds = site.storage.db.schema.get("feeds") + else: + try: + feeds = site.storage.loadJson("dbschema.json").get("feeds") + except: + continue + + if not feeds: + continue + + num_sites += 1 + + for name, query in feeds.items(): + s = time.time() + try: + db_query = DbQuery(query) + + params = [] + # Filters + if search_text: + db_query.wheres.append("(%s LIKE ? OR %s LIKE ?)" % (db_query.fields["body"], db_query.fields["title"])) + search_like = "%" + search_text.replace(" ", "%") + "%" + params.append(search_like) + params.append(search_like) + if filters.get("type") and filters["type"] not in query: + continue + + if day_limit: + db_query.wheres.append( + "%s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit) + ) + + # Order + db_query.parts["ORDER BY"] = "date_added DESC" + db_query.parts["LIMIT"] = str(limit) + + res = site.storage.query(str(db_query), params) + except Exception as err: + self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err))) + stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query}) + continue + for row in res: + row = dict(row) + if not row["date_added"] or row["date_added"] > time.time() + 120: + continue # Feed item is in the future, skip it + row["site"] = address + row["feed_name"] = name + rows.append(row) + stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)}) + return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3), "stats": stats}) + + +@PluginManager.registerTo("User") +class UserPlugin(object): + # Set queries that user follows + def setFeedFollow(self, address, feeds): + site_data = self.getSiteData(address) + site_data["follow"] = feeds + self.save() + return site_data diff --git a/plugins/Newsfeed/__init__.py b/plugins/Newsfeed/__init__.py new file mode 100644 index 00000000..6e624df6 --- /dev/null +++ b/plugins/Newsfeed/__init__.py @@ -0,0 +1 @@ +from . 
import NewsfeedPlugin \ No newline at end of file diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py new file mode 100644 index 00000000..e7945d93 --- /dev/null +++ b/plugins/OptionalManager/ContentDbPlugin.py @@ -0,0 +1,407 @@ +import time +import collections +import itertools +import re + +import gevent + +from util import helper +from Plugin import PluginManager +from Config import config +from Debug import Debug + +if "content_db" not in locals().keys(): # To keep between module reloads + content_db = None + + +@PluginManager.registerTo("ContentDb") +class ContentDbPlugin(object): + def __init__(self, *args, **kwargs): + global content_db + content_db = self + self.filled = {} # Site addresses that already filled from content.json + self.need_filling = False # file_optional table just created, fill data from content.json files + self.time_peer_numbers_updated = 0 + self.my_optional_files = {} # Last 50 site_address/inner_path called by fileWrite (auto-pinning these files) + self.optional_files = collections.defaultdict(dict) + self.optional_files_loading = False + helper.timer(60 * 5, self.checkOptionalLimit) + super(ContentDbPlugin, self).__init__(*args, **kwargs) + + def getSchema(self): + schema = super(ContentDbPlugin, self).getSchema() + + # Need file_optional table + schema["tables"]["file_optional"] = { + "cols": [ + ["file_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"], + ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"], + ["inner_path", "TEXT"], + ["hash_id", "INTEGER"], + ["size", "INTEGER"], + ["peer", "INTEGER DEFAULT 0"], + ["uploaded", "INTEGER DEFAULT 0"], + ["is_downloaded", "INTEGER DEFAULT 0"], + ["is_pinned", "INTEGER DEFAULT 0"], + ["time_added", "INTEGER DEFAULT 0"], + ["time_downloaded", "INTEGER DEFAULT 0"], + ["time_accessed", "INTEGER DEFAULT 0"] + ], + "indexes": [ + "CREATE UNIQUE INDEX file_optional_key ON file_optional (site_id, inner_path)", + "CREATE INDEX is_downloaded ON file_optional (is_downloaded)" + ], + "schema_changed": 11 + } + + return schema + + def initSite(self, site): + super(ContentDbPlugin, self).initSite(site) + if self.need_filling: + self.fillTableFileOptional(site) + if not self.optional_files_loading: + gevent.spawn_later(1, self.loadFilesOptional) + self.optional_files_loading = True + + def checkTables(self): + changed_tables = super(ContentDbPlugin, self).checkTables() + if "file_optional" in changed_tables: + self.need_filling = True + return changed_tables + + # Load optional files ending + def loadFilesOptional(self): + s = time.time() + num = 0 + total = 0 + total_downloaded = 0 + res = content_db.execute("SELECT site_id, inner_path, size, is_downloaded FROM file_optional") + site_sizes = collections.defaultdict(lambda: collections.defaultdict(int)) + for row in res: + self.optional_files[row["site_id"]][row["inner_path"][-8:]] = 1 + num += 1 + + # Update site size stats + site_sizes[row["site_id"]]["size_optional"] += row["size"] + if row["is_downloaded"]: + site_sizes[row["site_id"]]["optional_downloaded"] += row["size"] + + # Site site size stats to sites.json settings + site_ids_reverse = {val: key for key, val in self.site_ids.items()} + for site_id, stats in site_sizes.items(): + site_address = site_ids_reverse.get(site_id) + if not site_address: + self.log.error("Not found site_id: %s" % site_id) + continue + site = self.sites[site_address] + site.settings["size_optional"] = stats["size_optional"] + site.settings["optional_downloaded"] = 
stats["optional_downloaded"] + total += stats["size_optional"] + total_downloaded += stats["optional_downloaded"] + + self.log.debug( + "Loaded %s optional files: %.2fMB, downloaded: %.2fMB in %.3fs" % + (num, float(total) / 1024 / 1024, float(total_downloaded) / 1024 / 1024, time.time() - s) + ) + + if self.need_filling and self.getOptionalLimitBytes() >= 0 and self.getOptionalLimitBytes() < total_downloaded: + limit_bytes = self.getOptionalLimitBytes() + limit_new = round((float(total_downloaded) / 1024 / 1024 / 1024) * 1.1, 2) # Current limit + 10% + self.log.debug( + "First startup after update and limit is smaller than downloaded files size (%.2fGB), increasing it from %.2fGB to %.2fGB" % + (float(total_downloaded) / 1024 / 1024 / 1024, float(limit_bytes) / 1024 / 1024 / 1024, limit_new) + ) + config.saveValue("optional_limit", limit_new) + config.optional_limit = str(limit_new) + + # Predicts if the file is optional + def isOptionalFile(self, site_id, inner_path): + return self.optional_files[site_id].get(inner_path[-8:]) + + # Fill file_optional table with optional files found in sites + def fillTableFileOptional(self, site): + s = time.time() + site_id = self.site_ids.get(site.address) + if not site_id: + return False + cur = self.getCursor() + res = cur.execute("SELECT * FROM content WHERE size_files_optional > 0 AND site_id = %s" % site_id) + num = 0 + for row in res.fetchall(): + content = site.content_manager.contents[row["inner_path"]] + try: + num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur) + except Exception as err: + self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err)) + cur.close() + + # Set my files to pinned + from User import UserManager + user = UserManager.user_manager.get() + if not user: + user = UserManager.user_manager.create() + auth_address = user.getAuthAddress(site.address) + self.execute( + "UPDATE file_optional SET is_pinned = 1 WHERE site_id = :site_id AND inner_path LIKE :inner_path", + {"site_id": site_id, "inner_path": "%%/%s/%%" % auth_address} + ) + + self.log.debug( + "Filled file_optional table for %s in %.3fs (loaded: %s, is_pinned: %s)" % + (site.address, time.time() - s, num, self.cur.cursor.rowcount) + ) + self.filled[site.address] = True + + def setContentFilesOptional(self, site, content_inner_path, content, cur=None): + if not cur: + cur = self + + num = 0 + site_id = self.site_ids[site.address] + content_inner_dir = helper.getDirname(content_inner_path) + for relative_inner_path, file in content.get("files_optional", {}).items(): + file_inner_path = content_inner_dir + relative_inner_path + hash_id = int(file["sha512"][0:4], 16) + if hash_id in site.content_manager.hashfield: + is_downloaded = 1 + else: + is_downloaded = 0 + if site.address + "/" + content_inner_dir in self.my_optional_files: + is_pinned = 1 + else: + is_pinned = 0 + cur.insertOrUpdate("file_optional", { + "hash_id": hash_id, + "size": int(file["size"]) + }, { + "site_id": site_id, + "inner_path": file_inner_path + }, oninsert={ + "time_added": int(time.time()), + "time_downloaded": int(time.time()) if is_downloaded else 0, + "is_downloaded": is_downloaded, + "peer": is_downloaded, + "is_pinned": is_pinned + }) + self.optional_files[site_id][file_inner_path[-8:]] = 1 + num += 1 + + return num + + def setContent(self, site, inner_path, content, size=0): + super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size) + old_content = site.content_manager.contents.get(inner_path, {}) + if (not 
self.need_filling or self.filled.get(site.address)) and ("files_optional" in content or "files_optional" in old_content): + self.setContentFilesOptional(site, inner_path, content) + # Check deleted files + if old_content: + old_files = old_content.get("files_optional", {}).keys() + new_files = content.get("files_optional", {}).keys() + content_inner_dir = helper.getDirname(inner_path) + deleted = [content_inner_dir + key for key in old_files if key not in new_files] + if deleted: + site_id = self.site_ids[site.address] + self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": deleted}) + + def deleteContent(self, site, inner_path): + content = site.content_manager.contents.get(inner_path) + if content and "files_optional" in content: + site_id = self.site_ids[site.address] + content_inner_dir = helper.getDirname(inner_path) + optional_inner_paths = [ + content_inner_dir + relative_inner_path + for relative_inner_path in content.get("files_optional", {}).keys() + ] + self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": optional_inner_paths}) + super(ContentDbPlugin, self).deleteContent(site, inner_path) + + def updatePeerNumbers(self): + s = time.time() + num_file = 0 + num_updated = 0 + num_site = 0 + for site in list(self.sites.values()): + if not site.content_manager.has_optional_files: + continue + if not site.isServing(): + continue + has_updated_hashfield = next(( + peer + for peer in site.peers.values() + if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated + ), None) + + if not has_updated_hashfield and site.content_manager.hashfield.time_changed < self.time_peer_numbers_updated: + continue + + hashfield_peers = itertools.chain.from_iterable( + peer.hashfield.storage + for peer in site.peers.values() + if peer.has_hashfield + ) + peer_nums = collections.Counter( + itertools.chain( + hashfield_peers, + site.content_manager.hashfield + ) + ) + + site_id = self.site_ids[site.address] + if not site_id: + continue + + res = self.execute("SELECT file_id, hash_id, peer FROM file_optional WHERE ?", {"site_id": site_id}) + updates = {} + for row in res: + peer_num = peer_nums.get(row["hash_id"], 0) + if peer_num != row["peer"]: + updates[row["file_id"]] = peer_num + + for file_id, peer_num in updates.items(): + self.execute("UPDATE file_optional SET peer = ? 
WHERE file_id = ?", (peer_num, file_id)) + + num_updated += len(updates) + num_file += len(peer_nums) + num_site += 1 + + self.time_peer_numbers_updated = time.time() + self.log.debug("%s/%s peer number for %s site updated in %.3fs" % (num_updated, num_file, num_site, time.time() - s)) + + def queryDeletableFiles(self): + # First return the files with at least 10 seeders and not accessed in the last week + query = """ + SELECT * FROM file_optional + WHERE peer > 10 AND %s + ORDER BY time_accessed < %s DESC, uploaded / size + """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7)) + limit_start = 0 + while 1: + num = 0 + res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) + for row in res: + yield row + num += 1 + if num < 50: + break + limit_start += 50 + + self.log.debug("queryDeletableFiles returning less-seeded files") + + # Then return files with fewer seeders but still not accessed in the last week + query = """ + SELECT * FROM file_optional + WHERE peer <= 10 AND %s + ORDER BY peer DESC, time_accessed < %s DESC, uploaded / size + """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7)) + limit_start = 0 + while 1: + num = 0 + res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) + for row in res: + yield row + num += 1 + if num < 50: + break + limit_start += 50 + + self.log.debug("queryDeletableFiles returning everything") + + # At the end return all files + query = """ + SELECT * FROM file_optional + WHERE peer <= 10 AND %s + ORDER BY peer DESC, time_accessed, uploaded / size + """ % self.getOptionalUsedWhere() + limit_start = 0 + while 1: + num = 0 + res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) + for row in res: + yield row + num += 1 + if num < 50: + break + limit_start += 50 + + def getOptionalLimitBytes(self): + if config.optional_limit.endswith("%"): + limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit)) + limit_bytes = helper.getFreeSpace() * (limit_percent / 100) + else: + limit_bytes = float(re.sub("[^0-9.]", "", config.optional_limit)) * 1024 * 1024 * 1024 + return limit_bytes + + def getOptionalUsedWhere(self): + maxsize = config.optional_limit_exclude_minsize * 1024 * 1024 + query = "is_downloaded = 1 AND is_pinned = 0 AND size < %s" % maxsize + + # Don't delete optional files from owned sites + my_site_ids = [] + for address, site in self.sites.items(): + if site.settings["own"]: + my_site_ids.append(str(self.site_ids[address])) + + if my_site_ids: + query += " AND site_id NOT IN (%s)" % ", ".join(my_site_ids) + return query + + def getOptionalUsedBytes(self): + size = self.execute("SELECT SUM(size) FROM file_optional WHERE %s" % self.getOptionalUsedWhere()).fetchone()[0] + if not size: + size = 0 + return size + + def getOptionalNeedDelete(self, size): + if config.optional_limit.endswith("%"): + limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit)) + need_delete = size - ((helper.getFreeSpace() + size) * (limit_percent / 100)) + else: + need_delete = size - self.getOptionalLimitBytes() + return need_delete + + def checkOptionalLimit(self, limit=None): + if not limit: + limit = self.getOptionalLimitBytes() + + if limit < 0: + self.log.debug("Invalid limit for optional files: %s" % limit) + return False + + size = self.getOptionalUsedBytes() + + need_delete = self.getOptionalNeedDelete(size) + + self.log.debug( + "Optional size: %.1fMB/%.1fMB, Need delete: %.1fMB" % + (float(size) / 1024 / 1024, float(limit) / 1024 / 1024, float(need_delete) / 1024 / 1024) + ) + if need_delete <= 0: + return False + +
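# --- Illustrative sketch (not part of the patch) --------------------------------
# The limit handling above treats config.optional_limit two ways: a value ending
# in "%" means "percent of free disk space", anything else is an absolute size in
# GB. The standalone helper below mirrors the logic of getOptionalLimitBytes() and
# getOptionalNeedDelete() under that assumption; shutil.disk_usage stands in for
# helper.getFreeSpace(), which is not available outside ZeroNet.
import re
import shutil


def parse_optional_limit(limit_str, used_bytes, path="."):
    free_bytes = shutil.disk_usage(path).free
    if limit_str.endswith("%"):
        percent = float(re.sub(r"[^0-9.]", "", limit_str))
        limit_bytes = free_bytes * (percent / 100)
        # Percent limits are measured against the space that would be free if the
        # optional files were deleted, hence the (free + used) base above.
        need_delete = used_bytes - ((free_bytes + used_bytes) * (percent / 100))
    else:
        limit_bytes = float(re.sub(r"[^0-9.]", "", limit_str)) * 1024 * 1024 * 1024
        need_delete = used_bytes - limit_bytes
    return limit_bytes, need_delete


if __name__ == "__main__":
    # Example: 5 GB of downloaded optional files against the default "10%" limit
    limit, need_delete = parse_optional_limit("10%", used_bytes=5 * 1024 ** 3)
    print("limit=%.2fGB need_delete=%.2fGB" % (limit / 1024 ** 3, need_delete / 1024 ** 3))
# ---------------------------------------------------------------------------------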
self.updatePeerNumbers() + + site_ids_reverse = {val: key for key, val in self.site_ids.items()} + deleted_file_ids = [] + for row in self.queryDeletableFiles(): + site_address = site_ids_reverse.get(row["site_id"]) + site = self.sites.get(site_address) + if not site: + self.log.error("No site found for id: %s" % row["site_id"]) + continue + site.log.debug("Deleting %s %.3f MB left" % (row["inner_path"], float(need_delete) / 1024 / 1024)) + deleted_file_ids.append(row["file_id"]) + try: + site.content_manager.optionalRemoved(row["inner_path"], row["hash_id"], row["size"]) + site.storage.delete(row["inner_path"]) + need_delete -= row["size"] + except Exception as err: + site.log.error("Error deleting %s: %s" % (row["inner_path"], err)) + + if need_delete <= 0: + break + + cur = self.getCursor() + for file_id in deleted_file_ids: + cur.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"file_id": file_id}) + cur.close() diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py b/plugins/OptionalManager/OptionalManagerPlugin.py new file mode 100644 index 00000000..909caa31 --- /dev/null +++ b/plugins/OptionalManager/OptionalManagerPlugin.py @@ -0,0 +1,239 @@ +import time +import re +import collections + +import gevent + +from util import helper +from Plugin import PluginManager +from . import ContentDbPlugin + + +# We can only import plugin host classes after the plugins are loaded
+@PluginManager.afterLoad +def importPluginnedClasses(): + global config + from Config import config + + +def processAccessLog(): + if access_log: + content_db = ContentDbPlugin.content_db + now = int(time.time()) + num = 0 + for site_id in access_log: + content_db.execute( + "UPDATE file_optional SET time_accessed = %s WHERE ?" % now, + {"site_id": site_id, "inner_path": list(access_log[site_id].keys())} + ) + num += len(access_log[site_id]) + access_log.clear() + + +def processRequestLog(): + if request_log: + content_db = ContentDbPlugin.content_db + cur = content_db.getCursor() + num = 0 + for site_id in request_log: + for inner_path, uploaded in request_log[site_id].items(): + content_db.execute( + "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?"
% uploaded, + {"site_id": site_id, "inner_path": inner_path} + ) + num += 1 + request_log.clear() + + +if "access_log" not in locals().keys(): # To keep between module reloads + access_log = collections.defaultdict(dict) # {site_id: {inner_path1: 1, inner_path2: 1...}} + request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path1: 1, inner_path2: 1...}} + helper.timer(61, processAccessLog) + helper.timer(60, processRequestLog) + + +@PluginManager.registerTo("ContentManager") +class ContentManagerPlugin(object): + def __init__(self, *args, **kwargs): + self.cache_is_pinned = {} + super(ContentManagerPlugin, self).__init__(*args, **kwargs) + + def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): + if "|" in inner_path: # Big file piece + file_inner_path, file_range = inner_path.split("|") + else: + file_inner_path = inner_path + + self.contents.db.executeDelayed( + "UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 0", + {"now": int(time.time()), "site_id": self.contents.db.site_ids[self.site.address], "inner_path": file_inner_path} + ) + + return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own) + + def optionalRemoved(self, inner_path, hash_id, size=None): + self.contents.db.execute( + "UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 1", + {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} + ) + + if self.contents.db.cur.cursor.rowcount > 0: + back = super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size) + # Re-add to hashfield if we have other file with the same hash_id + if self.isDownloaded(hash_id=hash_id, force_check_db=True): + self.hashfield.appendHashId(hash_id) + else: + back = False + self.cache_is_pinned = {} + return back + + def optionalRenamed(self, inner_path_old, inner_path_new): + back = super(ContentManagerPlugin, self).optionalRenamed(inner_path_old, inner_path_new) + self.cache_is_pinned = {} + self.contents.db.execute( + "UPDATE file_optional SET inner_path = :inner_path_new WHERE site_id = :site_id AND inner_path = :inner_path_old", + {"site_id": self.contents.db.site_ids[self.site.address], "inner_path_old": inner_path_old, "inner_path_new": inner_path_new} + ) + return back + + def isDownloaded(self, inner_path=None, hash_id=None, force_check_db=False): + if hash_id and not force_check_db and hash_id not in self.hashfield: + return False + + if inner_path: + res = self.contents.db.execute( + "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1", + {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} + ) + else: + res = self.contents.db.execute( + "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND hash_id = :hash_id AND is_downloaded = 1 LIMIT 1", + {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id} + ) + row = res.fetchone() + if row and row["is_downloaded"]: + return True + else: + return False + + def isPinned(self, inner_path): + if inner_path in self.cache_is_pinned: + self.site.log.debug("Cached is pinned: %s" % inner_path) + return self.cache_is_pinned[inner_path] + + res = self.contents.db.execute( + "SELECT is_pinned FROM file_optional WHERE site_id = :site_id AND inner_path = 
:inner_path LIMIT 1", + {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} + ) + row = res.fetchone() + + if row and row[0]: + is_pinned = True + else: + is_pinned = False + + self.cache_is_pinned[inner_path] = is_pinned + self.site.log.debug("Cache set is pinned: %s %s" % (inner_path, is_pinned)) + + return is_pinned + + def setPin(self, inner_path, is_pinned): + content_db = self.contents.db + site_id = content_db.site_ids[self.site.address] + content_db.execute("UPDATE file_optional SET is_pinned = %d WHERE ?" % is_pinned, {"site_id": site_id, "inner_path": inner_path}) + self.cache_is_pinned = {} + + def optionalDelete(self, inner_path): + if self.isPinned(inner_path): + self.site.log.debug("Skip deleting pinned optional file: %s" % inner_path) + return False + else: + return super(ContentManagerPlugin, self).optionalDelete(inner_path) + + +@PluginManager.registerTo("WorkerManager") +class WorkerManagerPlugin(object): + def doneTask(self, task): + super(WorkerManagerPlugin, self).doneTask(task) + + if task["optional_hash_id"] and not self.tasks: # Execute delayed queries immedietly after tasks finished + ContentDbPlugin.content_db.processDelayed() + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def parsePath(self, path): + global access_log + path_parts = super(UiRequestPlugin, self).parsePath(path) + if path_parts: + site_id = ContentDbPlugin.content_db.site_ids.get(path_parts["request_address"]) + if site_id: + if ContentDbPlugin.content_db.isOptionalFile(site_id, path_parts["inner_path"]): + access_log[site_id][path_parts["inner_path"]] = 1 + return path_parts + + +@PluginManager.registerTo("FileRequest") +class FileRequestPlugin(object): + def actionGetFile(self, params): + stats = super(FileRequestPlugin, self).actionGetFile(params) + self.recordFileRequest(params["site"], params["inner_path"], stats) + return stats + + def actionStreamFile(self, params): + stats = super(FileRequestPlugin, self).actionStreamFile(params) + self.recordFileRequest(params["site"], params["inner_path"], stats) + return stats + + def recordFileRequest(self, site_address, inner_path, stats): + if not stats: + # Only track the last request of files + return False + site_id = ContentDbPlugin.content_db.site_ids[site_address] + if site_id and ContentDbPlugin.content_db.isOptionalFile(site_id, inner_path): + request_log[site_id][inner_path] += stats["bytes_sent"] + + +@PluginManager.registerTo("Site") +class SitePlugin(object): + def isDownloadable(self, inner_path): + is_downloadable = super(SitePlugin, self).isDownloadable(inner_path) + if is_downloadable: + return is_downloadable + + for path in self.settings.get("optional_help", {}).keys(): + if inner_path.startswith(path): + return True + + return False + + def fileForgot(self, inner_path): + if "|" in inner_path and self.content_manager.isPinned(re.sub(r"\|.*", "", inner_path)): + self.log.debug("File %s is pinned, no fileForgot" % inner_path) + return False + else: + return super(SitePlugin, self).fileForgot(inner_path) + + def fileDone(self, inner_path): + if "|" in inner_path and self.bad_files.get(inner_path, 0) > 5: # Idle optional file done + inner_path_file = re.sub(r"\|.*", "", inner_path) + num_changed = 0 + for key, val in self.bad_files.items(): + if key.startswith(inner_path_file) and val > 1: + self.bad_files[key] = 1 + num_changed += 1 + self.log.debug("Idle optional file piece done, changed retry number of %s pieces." 
% num_changed) + if num_changed: + gevent.spawn(self.retryBadFiles) + + return super(SitePlugin, self).fileDone(inner_path) + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("OptionalManager plugin") + group.add_argument('--optional_limit', help='Limit total size of optional files', default="10%", metavar="GB or free space %") + group.add_argument('--optional_limit_exclude_minsize', help='Exclude files larger than this limit from optional size limit calculation', default=20, metavar="MB", type=int) + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py new file mode 100644 index 00000000..4bd44695 --- /dev/null +++ b/plugins/OptionalManager/Test/TestOptionalManager.py @@ -0,0 +1,158 @@ +import copy + +import pytest + + +@pytest.mark.usefixtures("resetSettings") +class TestOptionalManager: + def testDbFill(self, site): + contents = site.content_manager.contents + assert len(site.content_manager.hashfield) > 0 + assert contents.db.execute("SELECT COUNT(*) FROM file_optional WHERE is_downloaded = 1").fetchone()[0] == len(site.content_manager.hashfield) + + def testSetContent(self, site): + contents = site.content_manager.contents + + # Add new file + new_content = copy.deepcopy(contents["content.json"]) + new_content["files_optional"]["testfile"] = { + "size": 1234, + "sha512": "aaaabbbbcccc" + } + num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] + contents["content.json"] = new_content + assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] > num_optional_files_before + + # Remove file + new_content = copy.deepcopy(contents["content.json"]) + del new_content["files_optional"]["testfile"] + num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] + contents["content.json"] = new_content + assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before + + def testDeleteContent(self, site): + contents = site.content_manager.contents + num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] + del contents["content.json"] + assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before + + def testVerifyFiles(self, site): + contents = site.content_manager.contents + + # Add new file + new_content = copy.deepcopy(contents["content.json"]) + new_content["files_optional"]["testfile"] = { + "size": 1234, + "sha512": "aaaabbbbcccc" + } + contents["content.json"] = new_content + file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() + assert not file_row["is_downloaded"] + + # Write file from outside of ZeroNet + site.storage.open("testfile", "wb").write(b"A" * 1234) # For quick check hash does not matter only file size + + hashfield_len_before = len(site.content_manager.hashfield) + site.storage.verifyFiles(quick_check=True) + assert len(site.content_manager.hashfield) == hashfield_len_before + 1 + + file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() + assert file_row["is_downloaded"] + + # Delete file outside of ZeroNet + site.storage.delete("testfile") + site.storage.verifyFiles(quick_check=True) + file_row = contents.db.execute("SELECT * FROM 
file_optional WHERE inner_path = 'testfile'").fetchone() + assert not file_row["is_downloaded"] + + def testVerifyFilesSameHashId(self, site): + contents = site.content_manager.contents + + new_content = copy.deepcopy(contents["content.json"]) + + # Add two files with same hashid (first 4 character) + new_content["files_optional"]["testfile1"] = { + "size": 1234, + "sha512": "aaaabbbbcccc" + } + new_content["files_optional"]["testfile2"] = { + "size": 2345, + "sha512": "aaaabbbbdddd" + } + contents["content.json"] = new_content + + assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd") + + # Write files from outside of ZeroNet (For quick check hash does not matter only file size) + site.storage.open("testfile1", "wb").write(b"A" * 1234) + site.storage.open("testfile2", "wb").write(b"B" * 2345) + + site.storage.verifyFiles(quick_check=True) + + # Make sure that both is downloaded + assert site.content_manager.isDownloaded("testfile1") + assert site.content_manager.isDownloaded("testfile2") + assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") in site.content_manager.hashfield + + # Delete one of the files + site.storage.delete("testfile1") + site.storage.verifyFiles(quick_check=True) + assert not site.content_manager.isDownloaded("testfile1") + assert site.content_manager.isDownloaded("testfile2") + assert site.content_manager.hashfield.getHashId("aaaabbbbdddd") in site.content_manager.hashfield + + def testIsPinned(self, site): + assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png") + site.content_manager.setPin("data/img/zerotalk-upvote.png", True) + assert site.content_manager.isPinned("data/img/zerotalk-upvote.png") + + assert len(site.content_manager.cache_is_pinned) == 1 + site.content_manager.cache_is_pinned = {} + assert site.content_manager.isPinned("data/img/zerotalk-upvote.png") + + def testBigfilePieceReset(self, site): + site.bad_files = { + "data/fake_bigfile.mp4|0-1024": 10, + "data/fake_bigfile.mp4|1024-2048": 10, + "data/fake_bigfile.mp4|2048-3064": 10 + } + site.onFileDone("data/fake_bigfile.mp4|0-1024") + assert site.bad_files["data/fake_bigfile.mp4|1024-2048"] == 1 + assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1 + + def testOptionalDelete(self, site): + contents = site.content_manager.contents + + site.content_manager.setPin("data/img/zerotalk-upvote.png", True) + site.content_manager.setPin("data/img/zeroid.png", False) + new_content = copy.deepcopy(contents["content.json"]) + del new_content["files_optional"]["data/img/zerotalk-upvote.png"] + del new_content["files_optional"]["data/img/zeroid.png"] + + assert site.storage.isFile("data/img/zerotalk-upvote.png") + assert site.storage.isFile("data/img/zeroid.png") + + site.storage.writeJson("content.json", new_content) + site.content_manager.loadContent("content.json", force=True) + + assert not site.storage.isFile("data/img/zeroid.png") + assert site.storage.isFile("data/img/zerotalk-upvote.png") + + def testOptionalRename(self, site): + contents = site.content_manager.contents + + site.content_manager.setPin("data/img/zerotalk-upvote.png", True) + new_content = copy.deepcopy(contents["content.json"]) + new_content["files_optional"]["data/img/zerotalk-upvote-new.png"] = new_content["files_optional"]["data/img/zerotalk-upvote.png"] + del new_content["files_optional"]["data/img/zerotalk-upvote.png"] + + assert site.storage.isFile("data/img/zerotalk-upvote.png") + assert 
site.content_manager.isPinned("data/img/zerotalk-upvote.png") + + site.storage.writeJson("content.json", new_content) + site.content_manager.loadContent("content.json", force=True) + + assert not site.storage.isFile("data/img/zerotalk-upvote.png") + assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png") + assert site.content_manager.isPinned("data/img/zerotalk-upvote-new.png") + assert site.storage.isFile("data/img/zerotalk-upvote-new.png") diff --git a/plugins/OptionalManager/Test/conftest.py b/plugins/OptionalManager/Test/conftest.py new file mode 100644 index 00000000..8c1df5b2 --- /dev/null +++ b/plugins/OptionalManager/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/OptionalManager/Test/pytest.ini b/plugins/OptionalManager/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/OptionalManager/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py new file mode 100644 index 00000000..52b0f749 --- /dev/null +++ b/plugins/OptionalManager/UiWebsocketPlugin.py @@ -0,0 +1,382 @@ +import re +import time +import html + +import gevent + +from Plugin import PluginManager +from Config import config +from util import helper +from Translate import Translate + +if "_" not in locals(): + _ = Translate("plugins/OptionalManager/languages/") + +bigfile_sha512_cache = {} + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def __init__(self, *args, **kwargs): + self.time_peer_numbers_updated = 0 + super(UiWebsocketPlugin, self).__init__(*args, **kwargs) + + def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs): + # Add file to content.db and set it as pinned + content_db = self.site.content_manager.contents.db + content_inner_dir = helper.getDirname(inner_path) + content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time() + if len(content_db.my_optional_files) > 50: # Keep only last 50 + oldest_key = min( + iter(content_db.my_optional_files.keys()), + key=(lambda key: content_db.my_optional_files[key]) + ) + del content_db.my_optional_files[oldest_key] + + return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs) + + def updatePeerNumbers(self): + self.site.updateHashfield() + content_db = self.site.content_manager.contents.db + content_db.updatePeerNumbers() + self.site.updateWebsocket(peernumber_updated=True) + + def addBigfileInfo(self, row): + global bigfile_sha512_cache + + content_db = self.site.content_manager.contents.db + site = content_db.sites[row["address"]] + if not site.settings.get("has_bigfile"): + return False + + file_key = row["address"] + "/" + row["inner_path"] + sha512 = bigfile_sha512_cache.get(file_key) + file_info = None + if not sha512: + file_info = site.content_manager.getFileInfo(row["inner_path"]) + if not file_info or not file_info.get("piece_size"): + return False + sha512 = file_info["sha512"] + bigfile_sha512_cache[file_key] = sha512 + + if sha512 in site.storage.piecefields: + piecefield = site.storage.piecefields[sha512].tobytes() + else: + piecefield = None + + if piecefield: + row["pieces"] = len(piecefield) + row["pieces_downloaded"] = piecefield.count(b"\x01") + row["downloaded_percent"] = 100 * 
row["pieces_downloaded"] / row["pieces"] + if row["pieces_downloaded"]: + if row["pieces"] == row["pieces_downloaded"]: + row["bytes_downloaded"] = row["size"] + else: + if not file_info: + file_info = site.content_manager.getFileInfo(row["inner_path"]) + row["bytes_downloaded"] = row["pieces_downloaded"] * file_info.get("piece_size", 0) + else: + row["bytes_downloaded"] = 0 + + row["is_downloading"] = bool(next((inner_path for inner_path in site.bad_files if inner_path.startswith(row["inner_path"])), False)) + + # Add leech / seed stats + row["peer_seed"] = 0 + row["peer_leech"] = 0 + for peer in site.peers.values(): + if not peer.time_piecefields_updated or sha512 not in peer.piecefields: + continue + peer_piecefield = peer.piecefields[sha512].tobytes() + if not peer_piecefield: + continue + if peer_piecefield == b"\x01" * len(peer_piecefield): + row["peer_seed"] += 1 + else: + row["peer_leech"] += 1 + + # Add myself + if piecefield: + if row["pieces_downloaded"] == row["pieces"]: + row["peer_seed"] += 1 + else: + row["peer_leech"] += 1 + + return True + + # Optional file functions + + def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded"): + if not address: + address = self.site.address + + # Update peer numbers if necessary + content_db = self.site.content_manager.contents.db + if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5: + # Start in new thread to avoid blocking + self.time_peer_numbers_updated = time.time() + gevent.spawn(self.updatePeerNumbers) + + if address == "all" and "ADMIN" not in self.permissions: + return self.response(to, {"error": "Forbidden"}) + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + if not all([re.match("^[a-z_*/+-]+( DESC| ASC|)$", part.strip()) for part in orderby.split(",")]): + return self.response(to, "Invalid order_by") + + if type(limit) != int: + return self.response(to, "Invalid limit") + + back = [] + content_db = self.site.content_manager.contents.db + + wheres = {} + wheres_raw = [] + if "bigfile" in filter: + wheres["size >"] = 1024 * 1024 * 10 + if "downloaded" in filter: + wheres_raw.append("(is_downloaded = 1 OR is_pinned = 1)") + if "pinned" in filter: + wheres["is_pinned"] = 1 + + if address == "all": + join = "LEFT JOIN site USING (site_id)" + else: + wheres["site_id"] = content_db.site_ids[address] + join = "" + + if wheres_raw: + query_wheres_raw = "AND" + " AND ".join(wheres_raw) + else: + query_wheres_raw = "" + + query = "SELECT * FROM file_optional %s WHERE ? 
%s ORDER BY %s LIMIT %s" % (join, query_wheres_raw, orderby, limit) + + for row in content_db.execute(query, wheres): + row = dict(row) + if address != "all": + row["address"] = address + + if row["size"] > 1024 * 1024: + has_info = self.addBigfileInfo(row) + else: + has_info = False + + if not has_info: + if row["is_downloaded"]: + row["bytes_downloaded"] = row["size"] + row["downloaded_percent"] = 100 + else: + row["bytes_downloaded"] = 0 + row["downloaded_percent"] = 0 + + back.append(row) + self.response(to, back) + + def actionOptionalFileInfo(self, to, inner_path): + content_db = self.site.content_manager.contents.db + site_id = content_db.site_ids[self.site.address] + + # Update peer numbers if necessary + if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5: + # Start in new thread to avoid blocking + self.time_peer_numbers_updated = time.time() + gevent.spawn(self.updatePeerNumbers) + + query = "SELECT * FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1" + res = content_db.execute(query, {"site_id": site_id, "inner_path": inner_path}) + row = next(res, None) + if row: + row = dict(row) + if row["size"] > 1024 * 1024: + row["address"] = self.site.address + self.addBigfileInfo(row) + self.response(to, row) + else: + self.response(to, None) + + def setPin(self, inner_path, is_pinned, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return {"error": "Forbidden"} + + site = self.server.sites[address] + site.content_manager.setPin(inner_path, is_pinned) + + return "ok" + + def actionOptionalFilePin(self, to, inner_path, address=None): + if type(inner_path) is not list: + inner_path = [inner_path] + back = self.setPin(inner_path, 1, address) + num_file = len(inner_path) + if back == "ok": + if num_file == 1: + self.cmd("notification", ["done", _["Pinned %s"] % html.escape(helper.getFilename(inner_path[0])), 5000]) + else: + self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000]) + self.response(to, back) + + def actionOptionalFileUnpin(self, to, inner_path, address=None): + if type(inner_path) is not list: + inner_path = [inner_path] + back = self.setPin(inner_path, 0, address) + num_file = len(inner_path) + if back == "ok": + if num_file == 1: + self.cmd("notification", ["done", _["Removed pin from %s"] % html.escape(helper.getFilename(inner_path[0])), 5000]) + else: + self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000]) + self.response(to, back) + + def actionOptionalFileDelete(self, to, inner_path, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + + content_db = site.content_manager.contents.db + site_id = content_db.site_ids[site.address] + + res = content_db.execute("SELECT * FROM file_optional WHERE ? 
LIMIT 1", {"site_id": site_id, "inner_path": inner_path, "is_downloaded": 1}) + row = next(res, None) + + if not row: + return self.response(to, {"error": "Not found in content.db"}) + + removed = site.content_manager.optionalRemoved(inner_path, row["hash_id"], row["size"]) + # if not removed: + # return self.response(to, {"error": "Not found in hash_id: %s" % row["hash_id"]}) + + content_db.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"site_id": site_id, "inner_path": inner_path}) + + try: + site.storage.delete(inner_path) + except Exception as err: + return self.response(to, {"error": "File delete error: %s" % err}) + site.updateWebsocket(file_delete=inner_path) + + if inner_path in site.content_manager.cache_is_pinned: + site.content_manager.cache_is_pinned = {} + + self.response(to, "ok") + + # Limit functions + + def actionOptionalLimitStats(self, to): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "Forbidden") + + back = {} + back["limit"] = config.optional_limit + back["used"] = self.site.content_manager.contents.db.getOptionalUsedBytes() + back["free"] = helper.getFreeSpace() + + self.response(to, back) + + def actionOptionalLimitSet(self, to, limit): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, {"error": "Forbidden"}) + config.optional_limit = re.sub("\.0+$", "", limit) # Remove unnecessary digits from end + config.saveValue("optional_limit", limit) + self.response(to, "ok") + + # Distribute help functions + + def actionOptionalHelpList(self, to, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + + self.response(to, site.settings.get("optional_help", {})) + + def actionOptionalHelp(self, to, directory, title, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + content_db = site.content_manager.contents.db + site_id = content_db.site_ids[address] + + if "optional_help" not in site.settings: + site.settings["optional_help"] = {} + + stats = content_db.execute( + "SELECT COUNT(*) AS num, SUM(size) AS size FROM file_optional WHERE site_id = :site_id AND inner_path LIKE :inner_path", + {"site_id": site_id, "inner_path": directory + "%"} + ).fetchone() + stats = dict(stats) + + if not stats["size"]: + stats["size"] = 0 + if not stats["num"]: + stats["num"] = 0 + + self.cmd("notification", [ + "done", + _["You started to help distribute %s.
Directory: %s"] % + (html.escape(title), html.escape(directory)), + 10000 + ]) + + site.settings["optional_help"][directory] = title + + self.response(to, dict(stats)) + + def actionOptionalHelpRemove(self, to, directory, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + + try: + del site.settings["optional_help"][directory] + self.response(to, "ok") + except Exception: + self.response(to, {"error": "Not found"}) + + def cbOptionalHelpAll(self, to, site, value): + site.settings["autodownloadoptional"] = value + self.response(to, value) + + def actionOptionalHelpAll(self, to, value, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + + if value: + if "ADMIN" in self.site.settings["permissions"]: + self.cbOptionalHelpAll(to, site, True) + else: + site_title = site.content_manager.contents["content.json"].get("title", address) + self.cmd( + "confirm", + [ + _["Help distribute all new optional files on site %s"] % html.escape(site_title), + _["Yes, I want to help!"] + ], + lambda res: self.cbOptionalHelpAll(to, site, True) + ) + else: + site.settings["autodownloadoptional"] = False + self.response(to, False) diff --git a/plugins/OptionalManager/__init__.py b/plugins/OptionalManager/__init__.py new file mode 100644 index 00000000..1f0ad2dd --- /dev/null +++ b/plugins/OptionalManager/__init__.py @@ -0,0 +1 @@ +from . import OptionalManagerPlugin \ No newline at end of file diff --git a/plugins/OptionalManager/languages/es.json b/plugins/OptionalManager/languages/es.json new file mode 100644 index 00000000..32ae46ae --- /dev/null +++ b/plugins/OptionalManager/languages/es.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "Archivos %s fijados", + "Removed pin from %s files": "Archivos %s que no estan fijados", + "You started to help distribute %s.
Directory: %s": "Tu empezaste a ayudar a distribuir %s.
Directorio: %s", + "Help distribute all new optional files on site %s": "Ayude a distribuir todos los archivos opcionales en el sitio %s", + "Yes, I want to help!": "¡Si, yo quiero ayudar!" +} diff --git a/plugins/OptionalManager/languages/fr.json b/plugins/OptionalManager/languages/fr.json new file mode 100644 index 00000000..47a563dc --- /dev/null +++ b/plugins/OptionalManager/languages/fr.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "Fichiers %s épinglés", + "Removed pin from %s files": "Fichiers %s ne sont plus épinglés", + "You started to help distribute %s.
Directory: %s": "Vous avez commencé à aider à distribuer %s.
Dossier : %s", + "Help distribute all new optional files on site %s": "Aider à distribuer tous les fichiers optionnels du site %s", + "Yes, I want to help!": "Oui, je veux aider !" +} diff --git a/plugins/OptionalManager/languages/hu.json b/plugins/OptionalManager/languages/hu.json new file mode 100644 index 00000000..7a23b86c --- /dev/null +++ b/plugins/OptionalManager/languages/hu.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "%s fájl rögzítve", + "Removed pin from %s files": "%s fájl rögzítés eltávolítva", + "You started to help distribute %s.
Directory: %s": "Új segítség a terjesztésben: %s.
Könyvtár: %s", + "Help distribute all new optional files on site %s": "Segítség az összes új opcionális fájl terjesztésében az %s oldalon", + "Yes, I want to help!": "Igen, segíteni akarok!" +} diff --git a/plugins/OptionalManager/languages/pt-br.json b/plugins/OptionalManager/languages/pt-br.json new file mode 100644 index 00000000..21d90cc0 --- /dev/null +++ b/plugins/OptionalManager/languages/pt-br.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "Arquivos %s fixados", + "Removed pin from %s files": "Arquivos %s não estão fixados", + "You started to help distribute %s.
Directory: %s": "Você começou a ajudar a distribuir %s.
Pasta: %s", + "Help distribute all new optional files on site %s": "Ajude a distribuir todos os novos arquivos opcionais no site %s", + "Yes, I want to help!": "Sim, eu quero ajudar!" +} diff --git a/plugins/OptionalManager/languages/zh-tw.json b/plugins/OptionalManager/languages/zh-tw.json new file mode 100644 index 00000000..dfa9eaf3 --- /dev/null +++ b/plugins/OptionalManager/languages/zh-tw.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "已固定 %s 個檔", + "Removed pin from %s files": "已解除固定 %s 個檔", + "You started to help distribute %s.
Directory: %s": "你已經開始幫助分發 %s
目錄:%s", + "Help distribute all new optional files on site %s": "你想要幫助分發 %s 網站的所有檔嗎?", + "Yes, I want to help!": "是,我想要幫助!" +} diff --git a/plugins/OptionalManager/languages/zh.json b/plugins/OptionalManager/languages/zh.json new file mode 100644 index 00000000..ae18118e --- /dev/null +++ b/plugins/OptionalManager/languages/zh.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "已固定 %s 个文件", + "Removed pin from %s files": "已解除固定 %s 个文件", + "You started to help distribute %s.
Directory: %s": "您已经开始帮助分发 %s
目录:%s", + "Help distribute all new optional files on site %s": "您想要帮助分发 %s 站点的所有文件吗?", + "Yes, I want to help!": "是,我想要帮助!" +} diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py new file mode 100644 index 00000000..b4c8787b --- /dev/null +++ b/plugins/PeerDb/PeerDbPlugin.py @@ -0,0 +1,100 @@ +import time +import sqlite3 +import random +import atexit + +import gevent +from Plugin import PluginManager + + +@PluginManager.registerTo("ContentDb") +class ContentDbPlugin(object): + def __init__(self, *args, **kwargs): + atexit.register(self.saveAllPeers) + super(ContentDbPlugin, self).__init__(*args, **kwargs) + + def getSchema(self): + schema = super(ContentDbPlugin, self).getSchema() + + schema["tables"]["peer"] = { + "cols": [ + ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"], + ["address", "TEXT NOT NULL"], + ["port", "INTEGER NOT NULL"], + ["hashfield", "BLOB"], + ["reputation", "INTEGER NOT NULL"], + ["time_added", "INTEGER NOT NULL"], + ["time_found", "INTEGER NOT NULL"] + ], + "indexes": [ + "CREATE UNIQUE INDEX peer_key ON peer (site_id, address, port)" + ], + "schema_changed": 2 + } + + return schema + + def loadPeers(self, site): + s = time.time() + site_id = self.site_ids.get(site.address) + res = self.execute("SELECT * FROM peer WHERE site_id = :site_id", {"site_id": site_id}) + num = 0 + num_hashfield = 0 + for row in res: + peer = site.addPeer(str(row["address"]), row["port"]) + if not peer: # Already exist + continue + if row["hashfield"]: + peer.hashfield.replaceFromBytes(row["hashfield"]) + num_hashfield += 1 + peer.time_added = row["time_added"] + peer.time_found = row["time_found"] + peer.reputation = row["reputation"] + if row["address"].endswith(".onion"): + peer.reputation = peer.reputation / 2 - 1 # Onion peers less likely working + num += 1 + if num_hashfield: + site.content_manager.has_optional_files = True + site.log.debug("%s peers (%s with hashfield) loaded in %.3fs" % (num, num_hashfield, time.time() - s)) + + def iteratePeers(self, site): + site_id = self.site_ids.get(site.address) + for key, peer in site.peers.items(): + address, port = key.rsplit(":", 1) + if peer.has_hashfield: + hashfield = sqlite3.Binary(peer.hashfield.tobytes()) + else: + hashfield = "" + yield (site_id, address, port, hashfield, peer.reputation, int(peer.time_added), int(peer.time_found)) + + def savePeers(self, site, spawn=False): + if spawn: + # Save peers every hour (+random some secs to not update very site at same time) + gevent.spawn_later(60 * 60 + random.randint(0, 60), self.savePeers, site, spawn=True) + if not site.peers: + site.log.debug("Peers not saved: No peers found") + return + s = time.time() + site_id = self.site_ids.get(site.address) + cur = self.getCursor() + try: + cur.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id}) + cur.cursor.executemany( + "INSERT INTO peer (site_id, address, port, hashfield, reputation, time_added, time_found) VALUES (?, ?, ?, ?, ?, ?, ?)", + self.iteratePeers(site) + ) + except Exception as err: + site.log.error("Save peer error: %s" % err) + site.log.debug("Peers saved in %.3fs" % (time.time() - s)) + + def initSite(self, site): + super(ContentDbPlugin, self).initSite(site) + gevent.spawn_later(0.5, self.loadPeers, site) + gevent.spawn_later(60*60, self.savePeers, site, spawn=True) + + def saveAllPeers(self): + for site in list(self.sites.values()): + try: + self.savePeers(site) + except Exception as err: + site.log.error("Save peer error: %s" % err) diff --git 
a/plugins/PeerDb/__init__.py b/plugins/PeerDb/__init__.py new file mode 100644 index 00000000..bc8c93b9 --- /dev/null +++ b/plugins/PeerDb/__init__.py @@ -0,0 +1,2 @@ +from . import PeerDbPlugin + diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py new file mode 100644 index 00000000..45287d49 --- /dev/null +++ b/plugins/Sidebar/SidebarPlugin.py @@ -0,0 +1,794 @@ +import re +import os +import html +import sys +import math +import time +import json +import io +import urllib +import urllib.parse + +import gevent + +from Config import config +from Plugin import PluginManager +from Debug import Debug +from Translate import Translate +from util import helper +from .ZipStream import ZipStream + +plugin_dir = "plugins/Sidebar" +media_dir = plugin_dir + "/media" +sys.path.append(plugin_dir) # To able to load geoip lib + +loc_cache = {} +if "_" not in locals(): + _ = Translate(plugin_dir + "/languages/") + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Inject our resources to end of original file streams + def actionUiMedia(self, path): + if path == "/uimedia/all.js" or path == "/uimedia/all.css": + # First yield the original file and header + body_generator = super(UiRequestPlugin, self).actionUiMedia(path) + for part in body_generator: + yield part + + # Append our media file to the end + ext = re.match(".*(js|css)$", path).group(1) + plugin_media_file = "%s/all.%s" % (media_dir, ext) + if config.debug: + # If debugging merge *.css to all.css and *.js to all.js + from Debug import DebugMedia + DebugMedia.merge(plugin_media_file) + if ext == "js": + yield _.translateData(open(plugin_media_file).read()).encode("utf8") + else: + for part in self.actionFile(plugin_media_file, send_header=False): + yield part + elif path.startswith("/uimedia/globe/"): # Serve WebGL globe files + file_name = re.match(".*/(.*)", path).group(1) + plugin_media_file = "%s_globe/%s" % (media_dir, file_name) + if config.debug and path.endswith("all.js"): + # If debugging merge *.css to all.css and *.js to all.js + from Debug import DebugMedia + DebugMedia.merge(plugin_media_file) + for part in self.actionFile(plugin_media_file): + yield part + else: + for part in super(UiRequestPlugin, self).actionUiMedia(path): + yield part + + def actionZip(self): + address = self.get["address"] + site = self.server.site_manager.get(address) + if not site: + return self.error404("Site not found") + + title = site.content_manager.contents.get("content.json", {}).get("title", "") + filename = "%s-backup-%s.zip" % (title, time.strftime("%Y-%m-%d_%H_%M")) + filename_quoted = urllib.parse.quote(filename) + self.sendHeader(content_type="application/zip", extra_headers={'Content-Disposition': 'attachment; filename="%s"' % filename_quoted}) + + return self.streamZip(site.storage.getPath(".")) + + def streamZip(self, dir_path): + zs = ZipStream(dir_path) + while 1: + data = zs.read() + if not data: + break + yield data + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def __init__(self, *args, **kwargs): + self.async_commands.add("sidebarGetPeers") + return super(UiWebsocketPlugin, self).__init__(*args, **kwargs) + + def sidebarRenderPeerStats(self, body, site): + connected = len([peer for peer in list(site.peers.values()) if peer.connection and peer.connection.connected]) + connectable = len([peer_id for peer_id in list(site.peers.keys()) if not peer_id.endswith(":0")]) + onion = len([peer_id for peer_id in list(site.peers.keys()) if ".onion" in peer_id]) + 
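# --- Illustrative sketch (not part of the patch) --------------------------------
# sidebarRenderPeerStats above buckets peers into connectable (known port), onion
# and local before turning the counts into percentages. The standalone function
# below shows the same kind of classification on plain "ip:port" peer keys; the
# key format and the sample values are assumptions for the example, the plugin
# itself works on Peer objects and helper.isPrivateIp().
import ipaddress


def classify_peer_keys(peer_keys):
    stats = {"total": 0, "connectable": 0, "onion": 0, "local": 0}
    for key in peer_keys:
        ip, _sep, port = key.rpartition(":")
        stats["total"] += 1
        if port != "0":  # Port 0 means the peer never announced a reachable port
            stats["connectable"] += 1
        if ip.endswith(".onion"):
            stats["onion"] += 1
            continue
        try:
            if ipaddress.ip_address(ip).is_private:
                stats["local"] += 1
        except ValueError:
            pass  # Key is not a plain IP address; skip the local check
    return stats


if __name__ == "__main__":
    sample = ["1.2.3.4:15441", "192.168.1.10:15441", "exampleexample12.onion:15441", "5.6.7.8:0"]
    print(classify_peer_keys(sample))
# ---------------------------------------------------------------------------------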
local = len([peer for peer in list(site.peers.values()) if helper.isPrivateIp(peer.ip)]) + peers_total = len(site.peers) + + # Add myself + if site.isServing(): + peers_total += 1 + if any(site.connection_server.port_opened.values()): + connectable += 1 + if site.connection_server.tor_manager.start_onions: + onion += 1 + + if peers_total: + percent_connected = float(connected) / peers_total + percent_connectable = float(connectable) / peers_total + percent_onion = float(onion) / peers_total + else: + percent_connectable = percent_connected = percent_onion = 0 + + if local: + local_html = _("
  • {_[Local]}:{local}
  • ") + else: + local_html = "" + + peer_ips = [peer.key for peer in site.getConnectablePeers(20, allow_private=False)] + peer_ips.sort(key=lambda peer_ip: ".onion:" in peer_ip) + copy_link = "http://127.0.0.1:43110/%s/?zeronet_peers=%s" % ( + site.content_manager.contents["content.json"].get("domain", site.address), + ",".join(peer_ips) + ) + + body.append(_(""" +
  • + +
      +
    • +
    • +
    • +
    • +
    +
      +
    • {_[Connected]}:{connected}
    • +
    • {_[Connectable]}:{connectable}
    • +
    • {_[Onion]}:{onion}
    • + {local_html} +
    • {_[Total]}:{peers_total}
    • +
    +
  • + """.replace("{local_html}", local_html))) + + def sidebarRenderTransferStats(self, body, site): + recv = float(site.settings.get("bytes_recv", 0)) / 1024 / 1024 + sent = float(site.settings.get("bytes_sent", 0)) / 1024 / 1024 + transfer_total = recv + sent + if transfer_total: + percent_recv = recv / transfer_total + percent_sent = sent / transfer_total + else: + percent_recv = 0.5 + percent_sent = 0.5 + + body.append(_(""" +
  • + +
      +
    • +
    • +
    +
      +
    • {_[Received]}:{recv:.2f}MB
    • +
    • {_[Sent]}:{sent:.2f}MB
    • +
    +
  • + """)) + + def sidebarRenderFileStats(self, body, site): + body.append(_(""" +
  • + +
      + """)) + + extensions = ( + ("html", "yellow"), + ("css", "orange"), + ("js", "purple"), + ("Image", "green"), + ("json", "darkblue"), + ("User data", "blue"), + ("Other", "white"), + ("Total", "black") + ) + # Collect stats + size_filetypes = {} + size_total = 0 + contents = site.content_manager.listContents() # Without user files + for inner_path in contents: + content = site.content_manager.contents[inner_path] + if "files" not in content or content["files"] is None: + continue + for file_name, file_details in list(content["files"].items()): + size_total += file_details["size"] + ext = file_name.split(".")[-1] + size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"] + + # Get user file sizes + size_user_content = site.content_manager.contents.execute( + "SELECT SUM(size) + SUM(size_files) AS size FROM content WHERE ?", + {"not__inner_path": contents} + ).fetchone()["size"] + if not size_user_content: + size_user_content = 0 + size_filetypes["User data"] = size_user_content + size_total += size_user_content + + # The missing difference is content.json sizes + if "json" in size_filetypes: + size_filetypes["json"] += max(0, site.settings["size"] - size_total) + size_total = size_other = site.settings["size"] + + # Bar + for extension, color in extensions: + if extension == "Total": + continue + if extension == "Other": + size = max(0, size_other) + elif extension == "Image": + size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) + size_other -= size + else: + size = size_filetypes.get(extension, 0) + size_other -= size + if size_total == 0: + percent = 0 + else: + percent = 100 * (float(size) / size_total) + percent = math.floor(percent * 100) / 100 # Floor to 2 digits + body.append( + """
    • """ % + (percent, _[extension], color, _[extension]) + ) + + # Legend + body.append("
      ") + for extension, color in extensions: + if extension == "Other": + size = max(0, size_other) + elif extension == "Image": + size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) + elif extension == "Total": + size = size_total + else: + size = size_filetypes.get(extension, 0) + + if extension == "js": + title = "javascript" + else: + title = extension + + if size > 1024 * 1024 * 10: # Format as mB is more than 10mB + size_formatted = "%.0fMB" % (size / 1024 / 1024) + else: + size_formatted = "%.0fkB" % (size / 1024) + + body.append("
    • %s:%s
    • " % (color, _[title], size_formatted)) + + body.append("
  • ") + + def sidebarRenderSizeLimit(self, body, site): + free_space = helper.getFreeSpace() / 1024 / 1024 + size = float(site.settings["size"]) / 1024 / 1024 + size_limit = site.getSizeLimit() + percent_used = size / size_limit + + body.append(_(""" +
  • + + MB + {_[Set]} +
  • + """)) + + def sidebarRenderOptionalFileStats(self, body, site): + size_total = float(site.settings["size_optional"]) + size_downloaded = float(site.settings["optional_downloaded"]) + + if not size_total: + return False + + percent_downloaded = size_downloaded / size_total + + size_formatted_total = size_total / 1024 / 1024 + size_formatted_downloaded = size_downloaded / 1024 / 1024 + + body.append(_(""" +
  • + +
      +
    • +
    • +
    +
      +
    • {_[Downloaded]}:{size_formatted_downloaded:.2f}MB
    • +
    • {_[Total]}:{size_formatted_total:.2f}MB
    • +
    +
  • + """)) + + return True + + def sidebarRenderOptionalFileSettings(self, body, site): + if self.site.settings.get("autodownloadoptional"): + checked = "checked='checked'" + else: + checked = "" + + body.append(_(""" +
  • + +
    + """)) + + autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit)) + body.append(_(""" +
    + + MB + {_[Set]} +
    + """)) + body.append("
  • ") + + def sidebarRenderBadFiles(self, body, site): + body.append(_(""" +
  • + +
      + """)) + + i = 0 + for bad_file, tries in site.bad_files.items(): + i += 1 + body.append(_("""
    • {bad_filename}
    • """, { + "bad_file_path": bad_file, + "bad_filename": helper.getFilename(bad_file), + "tries": _.pluralize(tries, "{} try", "{} tries") + })) + if i > 30: + break + + if len(site.bad_files) > 30: + num_bad_files = len(site.bad_files) - 30 + body.append(_("""
    • {_[+ {num_bad_files} more]}
    • """, nested=True)) + + body.append(""" +
    +
  • + """) + + def sidebarRenderDbOptions(self, body, site): + if site.storage.db: + inner_path = site.storage.getInnerPath(site.storage.db.db_path) + size = float(site.storage.getSize(inner_path)) / 1024 + feeds = len(site.storage.db.schema.get("feeds", {})) + else: + inner_path = _["No database found"] + size = 0.0 + feeds = 0 + + body.append(_(""" +
  • + + +
  • + """, nested=True)) + + def sidebarRenderIdentity(self, body, site): + auth_address = self.user.getAuthAddress(self.site.address, create=False) + rules = self.site.content_manager.getRules("data/users/%s/content.json" % auth_address) + if rules and rules.get("max_size"): + quota = rules["max_size"] / 1024 + try: + content = site.content_manager.contents["data/users/%s/content.json" % auth_address] + used = len(json.dumps(content)) + sum([file["size"] for file in list(content["files"].values())]) + except: + used = 0 + used = used / 1024 + else: + quota = used = 0 + + body.append(_(""" +
  • + +
    + {auth_address} + {_[Change]} +
    +
  • + """)) + + def sidebarRenderControls(self, body, site): + auth_address = self.user.getAuthAddress(self.site.address, create=False) + if self.site.settings["serving"]: + class_pause = "" + class_resume = "hidden" + else: + class_pause = "hidden" + class_resume = "" + + body.append(_(""" +
  • + + {_[Update]} + {_[Pause]} + {_[Resume]} + {_[Delete]} +
  • + """)) + + donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True) + site_address = self.site.address + body.append(_(""" +
  • +
    +
    + {site_address} + """)) + if donate_key == False or donate_key == "": + pass + elif isinstance(donate_key, str) and len(donate_key) > 0: + body.append(_(""" +
    +
  • +
  • +
    +
    + {donate_key} + """)) + else: + body.append(_(""" + {_[Donate]} + """)) + body.append(_(""" +
    +
  • + """)) + + def sidebarRenderOwnedCheckbox(self, body, site): + if self.site.settings["own"]: + checked = "checked='checked'" + else: + checked = "" + + body.append(_(""" +

    {_[This is my site]}

    +
    + """)) + + def sidebarRenderOwnSettings(self, body, site): + title = site.content_manager.contents.get("content.json", {}).get("title", "") + description = site.content_manager.contents.get("content.json", {}).get("description", "") + + body.append(_(""" +
  • + + +
  • + +
  • + + +
  • + +
  • + {_[Save site settings]} +
  • + """)) + + def sidebarRenderContents(self, body, site): + has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey")) + if has_privatekey: + tag_privatekey = _("{_[Private key saved.]} {_[Forgot]}") + else: + tag_privatekey = _("{_[Add saved private key]}") + + body.append(_(""" +
  • + + """.replace("{tag_privatekey}", tag_privatekey))) + + # Choose content you want to sign + body.append(_(""" + + """)) + + contents = ["content.json"] + contents += list(site.content_manager.contents.get("content.json", {}).get("includes", {}).keys()) + body.append(_("
    {_[Choose]}: ")) + for content in contents: + body.append(_("{content} ")) + body.append("
    ") + body.append("
  • ") + + def actionSidebarGetHtmlTag(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + site = self.site + + body = [] + + body.append("
    ") + body.append("×") + body.append("

    %s

    " % html.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True)) + + body.append("
    ") + + body.append("
      ") + + self.sidebarRenderPeerStats(body, site) + self.sidebarRenderTransferStats(body, site) + self.sidebarRenderFileStats(body, site) + self.sidebarRenderSizeLimit(body, site) + has_optional = self.sidebarRenderOptionalFileStats(body, site) + if has_optional: + self.sidebarRenderOptionalFileSettings(body, site) + self.sidebarRenderDbOptions(body, site) + self.sidebarRenderIdentity(body, site) + self.sidebarRenderControls(body, site) + if site.bad_files: + self.sidebarRenderBadFiles(body, site) + + self.sidebarRenderOwnedCheckbox(body, site) + body.append("
      ") + self.sidebarRenderOwnSettings(body, site) + self.sidebarRenderContents(body, site) + body.append("
      ") + body.append("
    ") + body.append("
    ") + + body.append("") + + self.response(to, "".join(body)) + + def downloadGeoLiteDb(self, db_path): + import gzip + import shutil + from util import helper + + if config.offline: + return False + + self.log.info("Downloading GeoLite2 City database...") + self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], 0]) + db_urls = [ + "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", + "https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz" + ] + for db_url in db_urls: + downloadl_err = None + try: + # Download + response = helper.httpRequest(db_url) + data_size = response.getheader('content-length') + data_recv = 0 + data = io.BytesIO() + while True: + buff = response.read(1024 * 512) + if not buff: + break + data.write(buff) + data_recv += 1024 * 512 + if data_size: + progress = int(float(data_recv) / int(data_size) * 100) + self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], progress]) + self.log.info("GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell()) + data.seek(0) + + # Unpack + with gzip.GzipFile(fileobj=data) as gzip_file: + shutil.copyfileobj(gzip_file, open(db_path, "wb")) + + self.cmd("progress", ["geolite-info", _["GeoLite2 City database downloaded!"], 100]) + time.sleep(2) # Wait for notify animation + self.log.info("GeoLite2 City database is ready at: %s" % db_path) + return True + except Exception as err: + download_err = err + self.log.error("Error downloading %s: %s" % (db_url, err)) + pass + self.cmd("progress", [ + "geolite-info", + _["GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}"].format(download_err, db_urls[0]), + -100 + ]) + + def getLoc(self, geodb, ip): + global loc_cache + + if ip in loc_cache: + return loc_cache[ip] + else: + try: + loc_data = geodb.get(ip) + except: + loc_data = None + + if not loc_data or "location" not in loc_data: + loc_cache[ip] = None + return None + + loc = { + "lat": loc_data["location"]["latitude"], + "lon": loc_data["location"]["longitude"], + } + if "city" in loc_data: + loc["city"] = loc_data["city"]["names"]["en"] + + if "country" in loc_data: + loc["country"] = loc_data["country"]["names"]["en"] + + loc_cache[ip] = loc + return loc + + def getGeoipDb(self): + db_name = 'GeoLite2-City.mmdb' + + sys_db_paths = [] + if sys.platform == "linux": + sys_db_paths += ['/usr/share/GeoIP/' + db_name] + + data_dir_db_path = os.path.join(config.data_dir, db_name) + + db_paths = sys_db_paths + [data_dir_db_path] + + for path in db_paths: + if os.path.isfile(path) and os.path.getsize(path) > 0: + return path + + self.log.info("GeoIP database not found at [%s]. Downloading to: %s", + " ".join(db_paths), data_dir_db_path) + if self.downloadGeoLiteDb(data_dir_db_path): + return data_dir_db_path + return None + + def getPeerLocations(self, peers): + import maxminddb + + db_path = self.getGeoipDb() + if not db_path: + self.log.debug("Not showing peer locations: no GeoIP database") + return False + + self.log.info("Loading GeoIP database from: %s" % db_path) + geodb = maxminddb.open_database(db_path) + + peers = list(peers.values()) + # Place bars + peer_locations = [] + placed = {} # Already placed bars here + for peer in peers: + # Height of bar + if peer.connection and peer.connection.last_ping_delay: + ping = round(peer.connection.last_ping_delay * 1000) + else: + ping = None + loc = self.getLoc(geodb, peer.ip) + + if not loc: + continue + # Create position array + lat, lon = loc["lat"], loc["lon"] + latlon = "%s,%s" % (lat, lon) + if latlon in placed and helper.getIpType(peer.ip) == "ipv4": # Dont place more than 1 bar to same place, fake repos using ip address last two part + lat += float(128 - int(peer.ip.split(".")[-2])) / 50 + lon += float(128 - int(peer.ip.split(".")[-1])) / 50 + latlon = "%s,%s" % (lat, lon) + placed[latlon] = True + peer_location = {} + peer_location.update(loc) + peer_location["lat"] = lat + peer_location["lon"] = lon + peer_location["ping"] = ping + + peer_locations.append(peer_location) + + # Append myself + for ip in self.site.connection_server.ip_external_list: + my_loc = self.getLoc(geodb, ip) + if my_loc: + my_loc["ping"] = 0 + peer_locations.append(my_loc) + + return peer_locations + + + def actionSidebarGetPeers(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + try: + peer_locations = self.getPeerLocations(self.site.peers) + globe_data = [] + ping_times = [ + peer_location["ping"] + for peer_location in peer_locations + if peer_location["ping"] + ] + if ping_times: + ping_avg = sum(ping_times) / float(len(ping_times)) + else: + ping_avg = 0 + + for peer_location in peer_locations: + if peer_location["ping"] == 0: # Me + height = -0.135 + elif peer_location["ping"]: + height = min(0.20, math.log(1 + peer_location["ping"] / ping_avg, 300)) + else: + height = -0.03 + + globe_data += [peer_location["lat"], peer_location["lon"], height] + + self.response(to, globe_data) + except Exception as err: + self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err)) + self.response(to, 
{"error": str(err)}) + + def actionSiteSetOwned(self, to, owned): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + if self.site.address == config.updatesite: + return self.response(to, "You can't change the ownership of the updater site") + + self.site.settings["own"] = bool(owned) + self.site.updateWebsocket(owned=owned) + + def actionUserSetSitePrivatekey(self, to, privatekey): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + site_data = self.user.sites[self.site.address] + site_data["privatekey"] = privatekey + self.site.updateWebsocket(set_privatekey=bool(privatekey)) + + return "ok" + + def actionSiteSetAutodownloadoptional(self, to, owned): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.settings["autodownloadoptional"] = bool(owned) + self.site.bad_files = {} + gevent.spawn(self.site.update, check_files=True) + self.site.worker_manager.removeSolvedFileTasks() + + def actionDbReload(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.storage.closeDb() + self.site.storage.getDb() + + return self.response(to, "ok") + + def actionDbRebuild(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + result = self.site.storage.rebuildDb() + + if not result: + return self.response(to, {"error": "Failed to rebuild database"}) + + return self.response(to, "ok") diff --git a/plugins/Sidebar/ZipStream.py b/plugins/Sidebar/ZipStream.py new file mode 100644 index 00000000..b6e05b21 --- /dev/null +++ b/plugins/Sidebar/ZipStream.py @@ -0,0 +1,59 @@ +import io +import os +import zipfile + +class ZipStream(object): + def __init__(self, dir_path): + self.dir_path = dir_path + self.pos = 0 + self.buff_pos = 0 + self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) + self.buff = io.BytesIO() + self.file_list = self.getFileList() + + def getFileList(self): + for root, dirs, files in os.walk(self.dir_path): + for file in files: + file_path = root + "/" + file + relative_path = os.path.join(os.path.relpath(root, self.dir_path), file) + yield file_path, relative_path + self.zf.close() + + def read(self, size=60 * 1024): + for file_path, relative_path in self.file_list: + self.zf.write(file_path, relative_path) + if self.buff.tell() >= size: + break + self.buff.seek(0) + back = self.buff.read() + self.buff.truncate(0) + self.buff.seek(0) + self.buff_pos += len(back) + return back + + def write(self, data): + self.pos += len(data) + self.buff.write(data) + + def tell(self): + return self.pos + + def seek(self, pos, whence=0): + if pos >= self.buff_pos: + self.buff.seek(pos - self.buff_pos, whence) + self.pos = pos + + def flush(self): + pass + + +if __name__ == "__main__": + zs = ZipStream(".") + out = open("out.zip", "wb") + while 1: + data = zs.read() + print("Write %s" % len(data)) + if not data: + break + out.write(data) + out.close() diff --git a/plugins/Sidebar/__init__.py b/plugins/Sidebar/__init__.py new file mode 100644 index 00000000..f2669d96 --- /dev/null +++ b/plugins/Sidebar/__init__.py @@ -0,0 +1 @@ +from . 
import SidebarPlugin \ No newline at end of file diff --git a/plugins/Sidebar/languages/da.json b/plugins/Sidebar/languages/da.json new file mode 100644 index 00000000..a421292c --- /dev/null +++ b/plugins/Sidebar/languages/da.json @@ -0,0 +1,81 @@ +{ + "Peers": "Klienter", + "Connected": "Forbundet", + "Connectable": "Mulige", + "Connectable peers": "Mulige klienter", + + "Data transfer": "Data overførsel", + "Received": "Modtaget", + "Received bytes": "Bytes modtaget", + "Sent": "Sendt", + "Sent bytes": "Bytes sendt", + + "Files": "Filer", + "Total": "I alt", + "Image": "Image", + "Other": "Andet", + "User data": "Bruger data", + + "Size limit": "Side max størrelse", + "limit used": "brugt", + "free space": "fri", + "Set": "Opdater", + + "Optional files": "Valgfri filer", + "Downloaded": "Downloadet", + "Download and help distribute all files": "Download og hjælp med at dele filer", + "Total size": "Størrelse i alt", + "Downloaded files": "Filer downloadet", + + "Database": "Database", + "search feeds": "søgninger", + "{feeds} query": "{feeds} søgninger", + "Reload": "Genindlæs", + "Rebuild": "Genopbyg", + "No database found": "Ingen database fundet", + + "Identity address": "Autorisations ID", + "Change": "Skift", + + "Update": "Opdater", + "Pause": "Pause", + "Resume": "Aktiv", + "Delete": "Slet", + "Are you sure?": "Er du sikker?", + + "Site address": "Side addresse", + "Donate": "Doner penge", + + "Missing files": "Manglende filer", + "{} try": "{} forsøg", + "{} tries": "{} forsøg", + "+ {num_bad_files} more": "+ {num_bad_files} mere", + + "This is my site": "Dette er min side", + "Site title": "Side navn", + "Site description": "Side beskrivelse", + "Save site settings": "Gem side opsætning", + + "Content publishing": "Indhold offentliggøres", + "Choose": "Vælg", + "Sign": "Signer", + "Publish": "Offentliggør", + + "This function is disabled on this proxy": "Denne funktion er slået fra på denne ZeroNet proxyEz a funkció ki van kapcsolva ezen a proxy-n", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 City database kunne ikke downloades: {}!
    Download venligst databasen manuelt og udpak i data folder:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 City database downloadet!", + + "Are you sure?": "Er du sikker?", + "Site storage limit modified!": "Side max størrelse ændret!", + "Database schema reloaded!": "Database definition genindlæst!", + "Database rebuilding....": "Genopbygger database...", + "Database rebuilt!": "Database genopbygget!", + "Site updated!": "Side opdateret!", + "Delete this site": "Slet denne side", + "File write error: ": "Fejl ved skrivning af fil: ", + "Site settings saved!": "Side opsætning gemt!", + "Enter your private key:": "Indtast din private nøgle:", + " Signed!": " Signeret!", + "WebGL not supported": "WebGL er ikke supporteret" +} \ No newline at end of file diff --git a/plugins/Sidebar/languages/de.json b/plugins/Sidebar/languages/de.json new file mode 100644 index 00000000..2f5feacd --- /dev/null +++ b/plugins/Sidebar/languages/de.json @@ -0,0 +1,81 @@ +{ + "Peers": "Peers", + "Connected": "Verbunden", + "Connectable": "Verbindbar", + "Connectable peers": "Verbindbare Peers", + + "Data transfer": "Datei Transfer", + "Received": "Empfangen", + "Received bytes": "Empfangene Bytes", + "Sent": "Gesendet", + "Sent bytes": "Gesendete Bytes", + + "Files": "Dateien", + "Total": "Gesamt", + "Image": "Bilder", + "Other": "Sonstiges", + "User data": "Nutzer Daten", + + "Size limit": "Speicher Limit", + "limit used": "Limit benutzt", + "free space": "freier Speicher", + "Set": "Setzten", + + "Optional files": "Optionale Dateien", + "Downloaded": "Heruntergeladen", + "Download and help distribute all files": "Herunterladen und helfen alle Dateien zu verteilen", + "Total size": "Gesamte Größe", + "Downloaded files": "Heruntergeladene Dateien", + + "Database": "Datenbank", + "search feeds": "Feeds durchsuchen", + "{feeds} query": "{feeds} Abfrage", + "Reload": "Neu laden", + "Rebuild": "Neu bauen", + "No database found": "Keine Datenbank gefunden", + + "Identity address": "Identitäts Adresse", + "Change": "Ändern", + + "Update": "Aktualisieren", + "Pause": "Pausieren", + "Resume": "Fortsetzen", + "Delete": "Löschen", + "Are you sure?": "Bist du sicher?", + + "Site address": "Seiten Adresse", + "Donate": "Spenden", + + "Missing files": "Fehlende Dateien", + "{} try": "{} versuch", + "{} tries": "{} versuche", + "+ {num_bad_files} more": "+ {num_bad_files} mehr", + + "This is my site": "Das ist meine Seite", + "Site title": "Seiten Titel", + "Site description": "Seiten Beschreibung", + "Save site settings": "Einstellungen der Seite speichern", + + "Content publishing": "Inhaltsveröffentlichung", + "Choose": "Wähle", + "Sign": "Signieren", + "Publish": "Veröffentlichen", + + "This function is disabled on this proxy": "Diese Funktion ist auf dieser Proxy deaktiviert", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 City Datenbank Download Fehler: {}!
    Bitte manuell herunterladen und die Datei in das Datei Verzeichnis extrahieren:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Herunterladen der GeoLite2 City Datenbank (einmalig, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 City Datenbank heruntergeladen!", + + "Are you sure?": "Bist du sicher?", + "Site storage limit modified!": "Speicher Limit der Seite modifiziert!", + "Database schema reloaded!": "Datebank Schema neu geladen!", + "Database rebuilding....": "Datenbank neu bauen...", + "Database rebuilt!": "Datenbank neu gebaut!", + "Site updated!": "Seite aktualisiert!", + "Delete this site": "Diese Seite löschen", + "File write error: ": "Datei schreib fehler:", + "Site settings saved!": "Seiten Einstellungen gespeichert!", + "Enter your private key:": "Gib deinen privaten Schlüssel ein:", + " Signed!": " Signiert!", + "WebGL not supported": "WebGL nicht unterstützt" +} diff --git a/plugins/Sidebar/languages/es.json b/plugins/Sidebar/languages/es.json new file mode 100644 index 00000000..b9e98c46 --- /dev/null +++ b/plugins/Sidebar/languages/es.json @@ -0,0 +1,79 @@ +{ + "Peers": "Pares", + "Connected": "Conectados", + "Connectable": "Conectables", + "Connectable peers": "Pares conectables", + + "Data transfer": "Transferencia de datos", + "Received": "Recibidos", + "Received bytes": "Bytes recibidos", + "Sent": "Enviados", + "Sent bytes": "Bytes envidados", + + "Files": "Ficheros", + "Total": "Total", + "Image": "Imagen", + "Other": "Otro", + "User data": "Datos del usuario", + + "Size limit": "Límite de tamaño", + "limit used": "Límite utilizado", + "free space": "Espacio libre", + "Set": "Establecer", + + "Optional files": "Ficheros opcionales", + "Downloaded": "Descargado", + "Download and help distribute all files": "Descargar y ayudar a distribuir todos los ficheros", + "Total size": "Tamaño total", + "Downloaded files": "Ficheros descargados", + + "Database": "Base de datos", + "search feeds": "Fuentes de búsqueda", + "{feeds} query": "{feeds} consulta", + "Reload": "Recargar", + "Rebuild": "Reconstruir", + "No database found": "No se ha encontrado la base de datos", + + "Identity address": "Dirección de la identidad", + "Change": "Cambiar", + + "Update": "Actualizar", + "Pause": "Pausar", + "Resume": "Reanudar", + "Delete": "Borrar", + + "Site address": "Dirección del sitio", + "Donate": "Donar", + + "Missing files": "Ficheros perdidos", + "{} try": "{} intento", + "{} tries": "{} intentos", + "+ {num_bad_files} more": "+ {num_bad_files} más", + + "This is my site": "Este es mi sitio", + "Site title": "Título del sitio", + "Site description": "Descripción del sitio", + "Save site settings": "Guardar la configuración del sitio", + + "Content publishing": "Publicación del contenido", + "Choose": "Elegir", + "Sign": "Firmar", + "Publish": "Publicar", + "This function is disabled on this proxy": "Esta función está deshabilitada en este proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "¡Error de la base de datos GeoLite2: {}!
    Por favor, descárgalo manualmente y descomprime al directorio de datos:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Descargando la base de datos de GeoLite2 (una única vez, ~20MB)...", + "GeoLite2 City database downloaded!": "¡Base de datos de GeoLite2 descargada!", + + "Are you sure?": "¿Estás seguro?", + "Site storage limit modified!": "¡Límite de almacenamiento del sitio modificado!", + "Database schema reloaded!": "¡Esquema de la base de datos recargado!", + "Database rebuilding....": "Reconstruyendo la base de datos...", + "Database rebuilt!": "¡Base de datos reconstruida!", + "Site updated!": "¡Sitio actualizado!", + "Delete this site": "Borrar este sitio", + "File write error: ": "Error de escritura de fichero:", + "Site settings saved!": "¡Configuración del sitio guardada!", + "Enter your private key:": "Introduce tu clave privada:", + " Signed!": " ¡firmado!", + "WebGL not supported": "WebGL no está soportado" +} diff --git a/plugins/Sidebar/languages/fr.json b/plugins/Sidebar/languages/fr.json new file mode 100644 index 00000000..5c4b3ac7 --- /dev/null +++ b/plugins/Sidebar/languages/fr.json @@ -0,0 +1,82 @@ +{ + "Peers": "Pairs", + "Connected": "Connectés", + "Connectable": "Accessibles", + "Connectable peers": "Pairs accessibles", + + "Data transfer": "Données transférées", + "Received": "Reçues", + "Received bytes": "Bytes reçus", + "Sent": "Envoyées", + "Sent bytes": "Bytes envoyés", + + "Files": "Fichiers", + "Total": "Total", + "Image": "Image", + "Other": "Autre", + "User data": "Utilisateurs", + + "Size limit": "Taille maximale", + "limit used": "utlisé", + "free space": "libre", + "Set": "Modifier", + + "Optional files": "Fichiers optionnels", + "Downloaded": "Téléchargé", + "Download and help distribute all files": "Télécharger et distribuer tous les fichiers", + "Total size": "Taille totale", + "Downloaded files": "Fichiers téléchargés", + + "Database": "Base de données", + "search feeds": "recherche", + "{feeds} query": "{feeds} requête", + "Reload": "Recharger", + "Rebuild": "Reconstruire", + "No database found": "Aucune base de données trouvée", + + "Identity address": "Adresse d'identité", + "Change": "Modifier", + + "Site control": "Opérations", + "Update": "Mettre à jour", + "Pause": "Suspendre", + "Resume": "Reprendre", + "Delete": "Supprimer", + "Are you sure?": "Êtes-vous certain?", + + "Site address": "Adresse du site", + "Donate": "Faire un don", + + "Missing files": "Fichiers manquants", + "{} try": "{} essai", + "{} tries": "{} essais", + "+ {num_bad_files} more": "+ {num_bad_files} manquants", + + "This is my site": "Ce site m'appartient", + "Site title": "Nom du site", + "Site description": "Description du site", + "Save site settings": "Enregistrer les paramètres", + + "Content publishing": "Publication du contenu", + "Choose": "Sélectionner", + "Sign": "Signer", + "Publish": "Publier", + + "This function is disabled on this proxy": "Cette fonction est désactivé sur ce proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Erreur au téléchargement de la base de données GeoLite2: {}!
    Téléchargez et décompressez dans le dossier data:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Téléchargement de la base de données GeoLite2 (une seule fois, ~20MB)...", + "GeoLite2 City database downloaded!": "Base de données GeoLite2 téléchargée!", + + "Are you sure?": "Êtes-vous certain?", + "Site storage limit modified!": "Taille maximale modifiée!", + "Database schema reloaded!": "Base de données rechargée!", + "Database rebuilding....": "Reconstruction de la base de données...", + "Database rebuilt!": "Base de données reconstruite!", + "Site updated!": "Site mis à jour!", + "Delete this site": "Supprimer ce site", + "File write error: ": "Erreur à l'écriture du fichier: ", + "Site settings saved!": "Paramètres du site enregistrés!", + "Enter your private key:": "Entrez votre clé privée:", + " Signed!": " Signé!", + "WebGL not supported": "WebGL n'est pas supporté" +} diff --git a/plugins/Sidebar/languages/hu.json b/plugins/Sidebar/languages/hu.json new file mode 100644 index 00000000..40ed8fab --- /dev/null +++ b/plugins/Sidebar/languages/hu.json @@ -0,0 +1,82 @@ +{ + "Peers": "Csatlakozási pontok", + "Connected": "Csaltakozva", + "Connectable": "Csatlakozható", + "Connectable peers": "Csatlakozható peer-ek", + + "Data transfer": "Adatátvitel", + "Received": "Fogadott", + "Received bytes": "Fogadott byte-ok", + "Sent": "Küldött", + "Sent bytes": "Küldött byte-ok", + + "Files": "Fájlok", + "Total": "Összesen", + "Image": "Kép", + "Other": "Egyéb", + "User data": "Felh. adat", + + "Size limit": "Méret korlát", + "limit used": "felhasznált", + "free space": "szabad hely", + "Set": "Beállít", + + "Optional files": "Opcionális fájlok", + "Downloaded": "Letöltött", + "Download and help distribute all files": "Minden opcionális fájl letöltése", + "Total size": "Teljes méret", + "Downloaded files": "Letöltve", + + "Database": "Adatbázis", + "search feeds": "Keresés források", + "{feeds} query": "{feeds} lekérdezés", + "Reload": "Újratöltés", + "Rebuild": "Újraépítés", + "No database found": "Adatbázis nem található", + + "Identity address": "Azonosító cím", + "Change": "Módosít", + + "Site control": "Oldal műveletek", + "Update": "Frissít", + "Pause": "Szünteltet", + "Resume": "Folytat", + "Delete": "Töröl", + "Are you sure?": "Biztos vagy benne?", + + "Site address": "Oldal címe", + "Donate": "Támogatás", + + "Missing files": "Hiányzó fájlok", + "{} try": "{} próbálkozás", + "{} tries": "{} próbálkozás", + "+ {num_bad_files} more": "+ még {num_bad_files} darab", + + "This is my site": "Ez az én oldalam", + "Site title": "Oldal neve", + "Site description": "Oldal leírása", + "Save site settings": "Oldal beállítások mentése", + + "Content publishing": "Tartalom publikálás", + "Choose": "Válassz", + "Sign": "Aláírás", + "Publish": "Publikálás", + + "This function is disabled on this proxy": "Ez a funkció ki van kapcsolva ezen a proxy-n", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 város adatbázis letöltési hiba: {}!
    A térképhez töltsd le és csomagold ki a data könyvtárba:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 város adatbázis letöltve!", + + "Are you sure?": "Biztos vagy benne?", + "Site storage limit modified!": "Az oldalt méret korlát módosítva!", + "Database schema reloaded!": "Adatbázis séma újratöltve!", + "Database rebuilding....": "Adatbázis újraépítés...", + "Database rebuilt!": "Adatbázis újraépítve!", + "Site updated!": "Az oldal frissítve!", + "Delete this site": "Az oldal törlése", + "File write error: ": "Fájl írási hiba: ", + "Site settings saved!": "Az oldal beállításai elmentve!", + "Enter your private key:": "Add meg a prviát kulcsod:", + " Signed!": " Aláírva!", + "WebGL not supported": "WebGL nem támogatott" +} \ No newline at end of file diff --git a/plugins/Sidebar/languages/it.json b/plugins/Sidebar/languages/it.json new file mode 100644 index 00000000..6aa0969a --- /dev/null +++ b/plugins/Sidebar/languages/it.json @@ -0,0 +1,81 @@ +{ + "Peers": "Peer", + "Connected": "Connessi", + "Connectable": "Collegabili", + "Connectable peers": "Peer collegabili", + + "Data transfer": "Trasferimento dati", + "Received": "Ricevuti", + "Received bytes": "Byte ricevuti", + "Sent": "Inviati", + "Sent bytes": "Byte inviati", + + "Files": "File", + "Total": "Totale", + "Image": "Imagine", + "Other": "Altro", + "User data": "Dati utente", + + "Size limit": "Limite dimensione", + "limit used": "limite usato", + "free space": "spazio libero", + "Set": "Imposta", + + "Optional files": "File facoltativi", + "Downloaded": "Scaricati", + "Download and help distribute all files": "Scarica e aiuta a distribuire tutti i file", + "Total size": "Dimensione totale", + "Downloaded files": "File scaricati", + + "Database": "Database", + "search feeds": "ricerca di feed", + "{feeds} query": "{feeds} interrogazione", + "Reload": "Ricaricare", + "Rebuild": "Ricostruire", + "No database found": "Nessun database trovato", + + "Identity address": "Indirizzo di identità", + "Change": "Cambia", + + "Update": "Aggiorna", + "Pause": "Sospendi", + "Resume": "Riprendi", + "Delete": "Cancella", + "Are you sure?": "Sei sicuro?", + + "Site address": "Indirizzo sito", + "Donate": "Dona", + + "Missing files": "File mancanti", + "{} try": "{} tenta", + "{} tries": "{} prova", + "+ {num_bad_files} more": "+ {num_bad_files} altri", + + "This is my site": "Questo è il mio sito", + "Site title": "Titolo sito", + "Site description": "Descrizione sito", + "Save site settings": "Salva impostazioni sito", + + "Content publishing": "Pubblicazione contenuto", + "Choose": "Scegli", + "Sign": "Firma", + "Publish": "Pubblica", + + "This function is disabled on this proxy": "Questa funzione è disabilitata su questo proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Errore scaricamento database GeoLite2 City: {}!
    Si prega di scaricarlo manualmente e spacchetarlo nella cartella dir:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Scaricamento database GeoLite2 City (solo una volta, ~20MB)...", + "GeoLite2 City database downloaded!": "Database GeoLite2 City scaricato!", + + "Are you sure?": "Sei sicuro?", + "Site storage limit modified!": "Limite di archiviazione del sito modificato!", + "Database schema reloaded!": "Schema database ricaricato!", + "Database rebuilding....": "Ricostruzione database...", + "Database rebuilt!": "Database ricostruito!", + "Site updated!": "Sito aggiornato!", + "Delete this site": "Cancella questo sito", + "File write error: ": "Errore scrittura file:", + "Site settings saved!": "Impostazioni sito salvate!", + "Enter your private key:": "Inserisci la tua chiave privata:", + " Signed!": " Firmato!", + "WebGL not supported": "WebGL non supportato" +} diff --git a/plugins/Sidebar/languages/jp.json b/plugins/Sidebar/languages/jp.json new file mode 100644 index 00000000..99b34564 --- /dev/null +++ b/plugins/Sidebar/languages/jp.json @@ -0,0 +1,82 @@ +{ + "Peers": "ピア", + "Connected": "接続済み", + "Connectable": "利用可能", + "Connectable peers": "ピアに接続可能", + + "Data transfer": "データ転送", + "Received": "受信", + "Received bytes": "受信バイト数", + "Sent": "送信", + "Sent bytes": "送信バイト数", + + "Files": "ファイル", + "Total": "合計", + "Image": "画像", + "Other": "その他", + "User data": "ユーザーデータ", + + "Size limit": "サイズ制限", + "limit used": "使用上限", + "free space": "フリースペース", + "Set": "セット", + + "Optional files": "オプション ファイル", + "Downloaded": "ダウンロード済み", + "Download and help distribute all files": "ダウンロードしてすべてのファイルの配布を支援する", + "Total size": "合計サイズ", + "Downloaded files": "ダウンロードされたファイル", + + "Database": "データベース", + "search feeds": "フィードを検索する", + "{feeds} query": "{フィード} お問い合わせ", + "Reload": "再読込", + "Rebuild": "再ビルド", + "No database found": "データベースが見つかりません", + + "Identity address": "Identity address", + "Change": "編集", + + "Site control": "サイト管理", + "Update": "更新", + "Pause": "一時停止", + "Resume": "再開", + "Delete": "削除", + "Are you sure?": "本当によろしいですか?", + + "Site address": "サイトアドレス", + "Donate": "寄付する", + + "Missing files": "ファイルがありません", + "{} try": "{} 試す", + "{} tries": "{} 試行", + "+ {num_bad_files} more": "+ {num_bad_files} more", + + "This is my site": "This is my site", + "Site title": "サイトタイトル", + "Site description": "サイトの説明", + "Save site settings": "サイトの設定を保存する", + + "Content publishing": "コンテンツを公開する", + "Choose": "選択", + "Sign": "Sign", + "Publish": "公開する", + + "This function is disabled on this proxy": "この機能はこのプロキシで無効になっています", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Cityデータベースのダウンロードエラー: {}!
    手動でダウンロードして、フォルダに解凍してください。:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Cityデータベースの読み込み (これは一度だけ行われます, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 Cityデータベースがダウンロードされました!", + + "Are you sure?": "本当によろしいですか?", + "Site storage limit modified!": "サイトの保存容量の制限が変更されました!", + "Database schema reloaded!": "データベーススキーマがリロードされました!", + "Database rebuilding....": "データベースの再構築中....", + "Database rebuilt!": "データベースが再構築されました!", + "Site updated!": "サイトが更新されました!", + "Delete this site": "このサイトを削除する", + "File write error: ": "ファイル書き込みエラー:", + "Site settings saved!": "サイト設定が保存されました!", + "Enter your private key:": "秘密鍵を入力してください:", + " Signed!": " Signed!", + "WebGL not supported": "WebGLはサポートされていません" +} diff --git a/plugins/Sidebar/languages/pl.json b/plugins/Sidebar/languages/pl.json new file mode 100644 index 00000000..93268507 --- /dev/null +++ b/plugins/Sidebar/languages/pl.json @@ -0,0 +1,82 @@ +{ + "Peers": "Użytkownicy równorzędni", + "Connected": "Połączony", + "Connectable": "Możliwy do podłączenia", + "Connectable peers": "Połączeni użytkownicy równorzędni", + + "Data transfer": "Transfer danych", + "Received": "Odebrane", + "Received bytes": "Odebrany bajty", + "Sent": "Wysłane", + "Sent bytes": "Wysłane bajty", + + "Files": "Pliki", + "Total": "Sumarycznie", + "Image": "Obraz", + "Other": "Inne", + "User data": "Dane użytkownika", + + "Size limit": "Rozmiar limitu", + "limit used": "zużyty limit", + "free space": "wolna przestrzeń", + "Set": "Ustaw", + + "Optional files": "Pliki opcjonalne", + "Downloaded": "Ściągnięte", + "Download and help distribute all files": "Ściągnij i pomóż rozpowszechniać wszystkie pliki", + "Total size": "Rozmiar sumaryczny", + "Downloaded files": "Ściągnięte pliki", + + "Database": "Baza danych", + "search feeds": "przeszukaj zasoby", + "{feeds} query": "{feeds} pytanie", + "Reload": "Odśwież", + "Rebuild": "Odbuduj", + "No database found": "Nie odnaleziono bazy danych", + + "Identity address": "Adres identyfikacyjny", + "Change": "Zmień", + + "Site control": "Kontrola strony", + "Update": "Zaktualizuj", + "Pause": "Wstrzymaj", + "Resume": "Wznów", + "Delete": "Skasuj", + "Are you sure?": "Jesteś pewien?", + + "Site address": "Adres strony", + "Donate": "Wspomóż", + + "Missing files": "Brakujące pliki", + "{} try": "{} próba", + "{} tries": "{} próby", + "+ {num_bad_files} more": "+ {num_bad_files} więcej", + + "This is my site": "To moja strona", + "Site title": "Tytuł strony", + "Site description": "Opis strony", + "Save site settings": "Zapisz ustawienia strony", + + "Content publishing": "Publikowanie treści", + "Choose": "Wybierz", + "Sign": "Podpisz", + "Publish": "Opublikuj", + + "This function is disabled on this proxy": "Ta funkcja jest zablokowana w tym proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Błąd ściągania bazy danych GeoLite2 City: {}!
    Proszę ściągnąć ją recznie i wypakować do katalogu danych:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Ściąganie bazy danych GeoLite2 City (tylko jednorazowo, ok. 20MB)...", + "GeoLite2 City database downloaded!": "Baza danych GeoLite2 City ściagnięta!", + + "Are you sure?": "Jesteś pewien?", + "Site storage limit modified!": "Limit pamięci strony zmodyfikowany!", + "Database schema reloaded!": "Schemat bazy danych załadowany ponownie!", + "Database rebuilding....": "Przebudowywanie bazy danych...", + "Database rebuilt!": "Baza danych przebudowana!", + "Site updated!": "Strona zaktualizowana!", + "Delete this site": "Usuń tę stronę", + "File write error: ": "Błąd zapisu pliku: ", + "Site settings saved!": "Ustawienia strony zapisane!", + "Enter your private key:": "Wpisz swój prywatny klucz:", + " Signed!": " Podpisane!", + "WebGL not supported": "WebGL nie jest obsługiwany" +} diff --git a/plugins/Sidebar/languages/pt-br.json b/plugins/Sidebar/languages/pt-br.json new file mode 100644 index 00000000..44fe06a2 --- /dev/null +++ b/plugins/Sidebar/languages/pt-br.json @@ -0,0 +1,97 @@ +{ + "Copy to clipboard": "Copiar para área de transferência (clipboard)", + "Peers": "Peers", + "Connected": "Ligados", + "Connectable": "Disponíveis", + "Onion": "Onion", + "Local": "Locais", + "Connectable peers": "Peers disponíveis", + + "Data transfer": "Transferência de dados", + "Received": "Recebidos", + "Received bytes": "Bytes recebidos", + "Sent": "Enviados", + "Sent bytes": "Bytes enviados", + + "Files": "Arquivos", + "Save as .zip": "Salvar como .zip", + "Total": "Total", + "Image": "Imagem", + "Other": "Outros", + "User data": "Dados do usuário", + + "Size limit": "Limite de tamanho", + "limit used": "limite utilizado", + "free space": "espaço livre", + "Set": "Definir", + + "Optional files": "Arquivos opcionais", + "Downloaded": "Baixados", + "Download and help distribute all files": "Baixar e ajudar a distribuir todos os arquivos", + "Total size": "Tamanho total", + "Downloaded files": "Arquivos baixados", + + "Database": "Banco de dados", + "search feeds": "pesquisar feeds", + "{feeds} query": "consulta de {feeds}", + "Reload": "Recarregar", + "Rebuild": "Reconstruir", + "No database found": "Base de dados não encontrada", + + "Identity address": "Endereço de identidade", + "Change": "Alterar", + + "Site control": "Controle do site", + "Update": "Atualizar", + "Pause": "Suspender", + "Resume": "Continuar", + "Delete": "Remover", + "Are you sure?": "Tem certeza?", + + "Site address": "Endereço do site", + "Donate": "Doar", + + "Needs to be updated": "Necessitam ser atualizados", + "{} try": "{} tentativa", + "{} tries": "{} tentativas", + "+ {num_bad_files} more": "+ {num_bad_files} adicionais", + + "This is my site": "Este é o meu site", + "Site title": "Título do site", + "Site description": "Descrição do site", + "Save site settings": "Salvar definições do site", + "Open site directory": "Abrir diretório do site", + + "Content publishing": "内容发布", + "Content publishing": "Publicação do conteúdo", + "Choose": "Escolher", + "Sign": "Assinar", + "Publish": "Publicar", + "Sign and publish": "Assinar e publicar", + "add saved private key": "adicionar privatekey (chave privada) para salvar", + "Private key saved for site signing": "Privatekey foi salva para assinar o site", + "Private key saved.": "Privatekey salva.", + "forgot": "esquecer", + "Saved private key removed": "Privatekey salva foi removida", + "This function is disabled on this proxy": "Esta função encontra-se desativada neste proxy", + "GeoLite2 City database 
download error: {}!
    Please download manually and unpack to data dir:
    {}": "Erro ao baixar a base de dados GeoLite2 City: {}!
    Por favor baixe manualmente e descompacte os dados para a seguinte pasta:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Baixando a base de dados GeoLite2 City (uma única vez, ~20MB)...", + "GeoLite2 City database downloaded!": "A base de dados GeoLite2 City foi baixada!", + + "Are you sure?": "Tem certeza?", + "Site storage limit modified!": "O limite de armazenamento do site foi modificado!", + "Database schema reloaded!": "O esquema da base de dados foi atualizado!", + "Database rebuilding....": "Reconstruindo base de dados...", + "Database rebuilt!": "Base de dados reconstruída!", + "Site updated!": "Site atualizado!", + "Delete this site": "Remover este site", + "Blacklist": "Blacklist", + "Blacklist this site": "Blacklistar este site", + "Reason": "Motivo", + "Delete and Blacklist": "Deletar e blacklistar", + "File write error: ": "Erro de escrita de arquivo: ", + "Site settings saved!": "Definições do site salvas!", + "Enter your private key:": "Digite sua chave privada:", + " Signed!": " Assinado!", + "WebGL not supported": "WebGL não é suportado" +} diff --git a/plugins/Sidebar/languages/ru.json b/plugins/Sidebar/languages/ru.json new file mode 100644 index 00000000..f2eeca04 --- /dev/null +++ b/plugins/Sidebar/languages/ru.json @@ -0,0 +1,82 @@ +{ + "Peers": "Пиры", + "Connected": "Подключенные", + "Connectable": "Доступные", + "Connectable peers": "Пиры доступны для подключения", + + "Data transfer": "Передача данных", + "Received": "Получено", + "Received bytes": "Получено байн", + "Sent": "Отправлено", + "Sent bytes": "Отправлено байт", + + "Files": "Файлы", + "Total": "Всего", + "Image": "Изображений", + "Other": "Другое", + "User data": "Ваш контент", + + "Size limit": "Ограничение по размеру", + "limit used": "Использовано", + "free space": "Доступно", + "Set": "Установить", + + "Optional files": "Опциональные файлы", + "Downloaded": "Загружено", + "Download and help distribute all files": "Загрузить опциональные файлы для помощи сайту", + "Total size": "Объём", + "Downloaded files": "Загруженные файлы", + + "Database": "База данных", + "search feeds": "поиск подписок", + "{feeds} query": "{feeds} запрос", + "Reload": "Перезагрузить", + "Rebuild": "Перестроить", + "No database found": "База данных не найдена", + + "Identity address": "Уникальный адрес", + "Change": "Изменить", + + "Site control": "Управление сайтом", + "Update": "Обновить", + "Pause": "Пауза", + "Resume": "Продолжить", + "Delete": "Удалить", + "Are you sure?": "Вы уверены?", + + "Site address": "Адрес сайта", + "Donate": "Пожертвовать", + + "Missing files": "Отсутствующие файлы", + "{} try": "{} попробовать", + "{} tries": "{} попыток", + "+ {num_bad_files} more": "+ {num_bad_files} ещё", + + "This is my site": "Это мой сайт", + "Site title": "Название сайта", + "Site description": "Описание сайта", + "Save site settings": "Сохранить настройки сайта", + + "Content publishing": "Публикация контента", + "Choose": "Выбрать", + "Sign": "Подписать", + "Publish": "Опубликовать", + + "This function is disabled on this proxy": "Эта функция отключена на этом прокси", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Ошибка загрузки базы городов GeoLite2: {}!
    Пожалуйста, загрузите её вручную и распакуйте в папку:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Загрузка базы городов GeoLite2 (это делается только 1 раз, ~20MB)...", + "GeoLite2 City database downloaded!": "База GeoLite2 успешно загружена!", + + "Are you sure?": "Вы уверены?", + "Site storage limit modified!": "Лимит хранилища для сайта изменен!", + "Database schema reloaded!": "Схема базы данных перезагружена!", + "Database rebuilding....": "Перестройка базы данных...", + "Database rebuilt!": "База данных перестроена!", + "Site updated!": "Сайт обновлён!", + "Delete this site": "Удалить этот сайт", + "File write error: ": "Ошибка записи файла:", + "Site settings saved!": "Настройки сайта сохранены!", + "Enter your private key:": "Введите свой приватный ключ:", + " Signed!": " Подписано!", + "WebGL not supported": "WebGL не поддерживается" +} diff --git a/plugins/Sidebar/languages/tr.json b/plugins/Sidebar/languages/tr.json new file mode 100644 index 00000000..88fcd6e0 --- /dev/null +++ b/plugins/Sidebar/languages/tr.json @@ -0,0 +1,82 @@ +{ + "Peers": "Eşler", + "Connected": "Bağlı", + "Connectable": "Erişilebilir", + "Connectable peers": "Bağlanılabilir eşler", + + "Data transfer": "Veri aktarımı", + "Received": "Alınan", + "Received bytes": "Bayt alındı", + "Sent": "Gönderilen", + "Sent bytes": "Bayt gönderildi", + + "Files": "Dosyalar", + "Total": "Toplam", + "Image": "Resim", + "Other": "Diğer", + "User data": "Kullanıcı verisi", + + "Size limit": "Boyut sınırı", + "limit used": "kullanılan", + "free space": "boş", + "Set": "Ayarla", + + "Optional files": "İsteğe bağlı dosyalar", + "Downloaded": "İndirilen", + "Download and help distribute all files": "Tüm dosyaları indir ve yayılmalarına yardım et", + "Total size": "Toplam boyut", + "Downloaded files": "İndirilen dosyalar", + + "Database": "Veritabanı", + "search feeds": "kaynak ara", + "{feeds} query": "{feeds} sorgu", + "Reload": "Yenile", + "Rebuild": "Yapılandır", + "No database found": "Veritabanı yok", + + "Identity address": "Kimlik adresi", + "Change": "Değiştir", + + "Site control": "Site kontrolü", + "Update": "Güncelle", + "Pause": "Duraklat", + "Resume": "Sürdür", + "Delete": "Sil", + "Are you sure?": "Emin misin?", + + "Site address": "Site adresi", + "Donate": "Bağış yap", + + "Missing files": "Eksik dosyalar", + "{} try": "{} deneme", + "{} tries": "{} deneme", + "+ {num_bad_files} more": "+ {num_bad_files} tane daha", + + "This is my site": "Bu benim sitem", + "Site title": "Site başlığı", + "Site description": "Site açıklaması", + "Save site settings": "Site ayarlarını kaydet", + + "Content publishing": "İçerik yayımlanıyor", + "Choose": "Seç", + "Sign": "İmzala", + "Publish": "Yayımla", + + "This function is disabled on this proxy": "Bu özellik bu vekilde kullanılamaz", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Şehir veritabanı indirme hatası: {}!
    Lütfen kendiniz indirip aşağıdaki konuma açınınız:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Şehir veritabanı indiriliyor (sadece bir kere, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 Şehir veritabanı indirildi!", + + "Are you sure?": "Emin misiniz?", + "Site storage limit modified!": "Site saklama sınırı değiştirildi!", + "Database schema reloaded!": "Veritabanı şeması yeniden yüklendi!", + "Database rebuilding....": "Veritabanı yeniden inşa ediliyor...", + "Database rebuilt!": "Veritabanı yeniden inşa edildi!", + "Site updated!": "Site güncellendi!", + "Delete this site": "Bu siteyi sil", + "File write error: ": "Dosya yazma hatası: ", + "Site settings saved!": "Site ayarları kaydedildi!", + "Enter your private key:": "Özel anahtarınızı giriniz:", + " Signed!": " İmzala!", + "WebGL not supported": "WebGL desteklenmiyor" +} diff --git a/plugins/Sidebar/languages/zh-tw.json b/plugins/Sidebar/languages/zh-tw.json new file mode 100644 index 00000000..9d4ea1be --- /dev/null +++ b/plugins/Sidebar/languages/zh-tw.json @@ -0,0 +1,83 @@ +{ + "Peers": "節點數", + "Connected": "已連線", + "Connectable": "可連線", + "Connectable peers": "可連線節點", + + "Data transfer": "數據傳輸", + "Received": "已接收", + "Received bytes": "已接收位元組", + "Sent": "已傳送", + "Sent bytes": "已傳送位元組", + + "Files": "檔案", + "Total": "共計", + "Image": "圖片", + "Other": "其他", + "User data": "使用者數據", + + "Size limit": "大小限制", + "limit used": "已使用", + "free space": "可用空間", + "Set": "偏好設定", + + "Optional files": "可選檔案", + "Downloaded": "已下載", + "Download and help distribute all files": "下載並幫助分發所有檔案", + "Total size": "總大小", + "Downloaded files": "下載的檔案", + + "Database": "資料庫", + "search feeds": "搜尋供稿", + "{feeds} query": "{feeds} 查詢 ", + "Reload": "重新整理", + "Rebuild": "重建", + "No database found": "未找到資料庫", + + "Identity address": "身分位址", + "Change": "變更", + + "Site control": "網站控制", + "Update": "更新", + "Pause": "暫停", + "Resume": "恢復", + "Delete": "刪除", + "Are you sure?": "你確定?", + + "Site address": "網站位址", + "Donate": "捐贈", + + "Missing files": "缺少的檔案", + "{} try": "{} 嘗試", + "{} tries": "{} 已嘗試", + "+ {num_bad_files} more": "+ {num_bad_files} 更多", + + "This is my site": "這是我的網站", + "Site title": "網站標題", + "Site description": "網站描述", + "Save site settings": "存儲網站設定", + "Open site directory": "打開所在資料夾", + + "Content publishing": "內容發布", + "Choose": "選擇", + "Sign": "簽署", + "Publish": "發布", + "Sign and publish": "簽名並發布", + "This function is disabled on this proxy": "此代理上禁用此功能", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 地理位置資料庫下載錯誤:{}!
    請手動下載並解壓到數據目錄:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下載 GeoLite2 地理位置資料庫 (僅一次,約 20MB )...", + "GeoLite2 City database downloaded!": "GeoLite2 地理位置資料庫已下載!", + + "Are you sure?": "你確定?", + "Site storage limit modified!": "網站存儲限制已變更!", + "Database schema reloaded!": "資料庫架構重新加載!", + "Database rebuilding....": "資料庫重建中...", + "Database rebuilt!": "資料庫已重建!", + "Site updated!": "網站已更新!", + "Delete this site": "刪除此網站", + "File write error: ": "檔案寫入錯誤:", + "Site settings saved!": "網站設置已保存!", + "Enter your private key:": "輸入您的私鑰:", + " Signed!": " 已簽署!", + "WebGL not supported": "不支援 WebGL" +} diff --git a/plugins/Sidebar/languages/zh.json b/plugins/Sidebar/languages/zh.json new file mode 100644 index 00000000..696084cf --- /dev/null +++ b/plugins/Sidebar/languages/zh.json @@ -0,0 +1,98 @@ +{ + "Copy to clipboard": "复制到剪切板", + "Peers": "节点数", + "Connected": "已连接", + "Connectable": "可连接", + "Onion": "洋葱点", + "Local": "局域网", + "Connectable peers": "可连接节点", + + "Data transfer": "数据传输", + "Received": "已接收", + "Received bytes": "已接收字节", + "Sent": "已发送", + "Sent bytes": "已发送字节", + + "Files": "文件", + "Save as .zip": "打包成zip文件", + "Total": "总计", + "Image": "图像", + "Other": "其他", + "User data": "用户数据", + + "Size limit": "大小限制", + "limit used": "限额", + "free space": "剩余空间", + "Set": "设置", + + "Optional files": "可选文件", + "Downloaded": "已下载", + "Download and help distribute all files": "下载并帮助分发所有文件", + "Auto download big file size limit": "自动下载大文件大小限制", + "Total size": "总大小", + "Downloaded files": "已下载文件", + + "Database": "数据库", + "search feeds": "搜索数据源", + "{feeds} query": "{feeds} 请求", + "Reload": "重载", + "Rebuild": "重建", + "No database found": "没有找到数据库", + + "Identity address": "身份地址", + "Change": "更改", + + "Site control": "站点控制", + "Update": "更新", + "Pause": "暂停", + "Resume": "恢复", + "Delete": "删除", + "Are you sure?": "您确定吗?", + + "Site address": "站点地址", + "Donate": "捐赠", + + "Needs to be updated": "需要更新", + "{} try": "{} 尝试", + "{} tries": "{} 已尝试", + "+ {num_bad_files} more": "+ {num_bad_files} 更多", + + "This is my site": "这是我的站点", + "Site title": "站点标题", + "Site description": "站点描述", + "Save site settings": "保存站点设置", + "Open site directory": "打开所在文件夹", + + "Content publishing": "内容发布", + "Add saved private key": "添加并保存私钥", + "Save": "保存", + "Private key saved.": "私钥已保存", + "Private key saved for site signing": "已保存用于站点签名的私钥", + "Forgot": "删除私钥", + "Saved private key removed": "保存的私钥已删除", + "Choose": "选择", + "Sign": "签名", + "Publish": "发布", + "Sign and publish": "签名并发布", + "This function is disabled on this proxy": "此功能在代理上被禁用", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 地理位置数据库下载错误:{}!
    请手动下载并解压在数据目录:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下载 GeoLite2 地理位置数据库 (仅需一次,约 20MB )...", + "GeoLite2 City database downloaded!": "GeoLite2 地理位置数据库已下载!", + + "Are you sure?": "您确定吗?", + "Site storage limit modified!": "站点存储限制已更改!", + "Database schema reloaded!": "数据库模式已重新加载!", + "Database rebuilding....": "数据库重建中...", + "Database rebuilt!": "数据库已重建!", + "Site updated!": "站点已更新!", + "Delete this site": "删除此站点", + "Blacklist": "黑名单", + "Blacklist this site": "拉黑此站点", + "Reason": "原因", + "Delete and Blacklist": "删除并拉黑", + "File write error: ": "文件写入错误:", + "Site settings saved!": "站点设置已保存!", + "Enter your private key:": "输入您的私钥:", + " Signed!": " 已签名!", + "WebGL not supported": "不支持 WebGL" +} diff --git a/plugins/Sidebar/media/Class.coffee b/plugins/Sidebar/media/Class.coffee new file mode 100644 index 00000000..d62ab25c --- /dev/null +++ b/plugins/Sidebar/media/Class.coffee @@ -0,0 +1,23 @@ +class Class + trace: true + + log: (args...) -> + return unless @trace + return if typeof console is 'undefined' + args.unshift("[#{@.constructor.name}]") + console.log(args...) + @ + + logStart: (name, args...) -> + return unless @trace + @logtimers or= {} + @logtimers[name] = +(new Date) + @log "#{name}", args..., "(started)" if args.length > 0 + @ + + logEnd: (name, args...) -> + ms = +(new Date)-@logtimers[name] + @log "#{name}", args..., "(Done in #{ms}ms)" + @ + +window.Class = Class \ No newline at end of file diff --git a/plugins/Sidebar/media/Internals.coffee b/plugins/Sidebar/media/Internals.coffee new file mode 100644 index 00000000..484ecdb7 --- /dev/null +++ b/plugins/Sidebar/media/Internals.coffee @@ -0,0 +1,60 @@ +class Internals extends Class + constructor: (@sidebar) -> + @tag = null + @opened = false + if window.top.location.hash == "#internals" + setTimeout (=> @open()), 10 + + createHtmltag: -> + @when_loaded = $.Deferred() + if not @container + @container = $(""" +
    +
    + + """) + @container.appendTo(document.body) + @tag = @container.find(".internals") + + open: => + @createHtmltag() + @sidebar.fixbutton_targety = @sidebar.page_height + @stopDragY() + + onOpened: => + @sidebar.onClosed() + @log "onOpened" + + onClosed: => + $(document.body).removeClass("body-internals") + + stopDragY: => + # Animate sidebar and iframe + if @sidebar.fixbutton_targety == @sidebar.fixbutton_inity + # Closed + targety = 0 + @opened = false + else + # Opened + targety = @sidebar.fixbutton_targety - @sidebar.fixbutton_inity + @onOpened() + @opened = true + + # Revent sidebar transitions + if @tag + @tag.css("transition", "0.5s ease-out") + @tag.css("transform", "translateY(#{targety}px)").one transitionEnd, => + @tag.css("transition", "") + if not @opened + @log "cleanup" + # Revert body transformations + @log "stopdrag", "opened:", @opened, targety + if not @opened + @onClosed() + +window.Internals = Internals \ No newline at end of file diff --git a/plugins/Sidebar/media/Internals.css b/plugins/Sidebar/media/Internals.css new file mode 100644 index 00000000..36b2489e --- /dev/null +++ b/plugins/Sidebar/media/Internals.css @@ -0,0 +1,17 @@ +.internals-container { width: 100%; z-index: 998; position: absolute; top: -100vh; } +.internals { background-color: #EEE; height: 100vh; transform: translateY(0px); } +.internals-middle {height: 0px; top: 50%; position: absolute; width: 100%; left: 50%; } + +.internals .mynode { + border: 0.5px solid #aaa; width: 50px; height: 50px; transform: rotateZ(45deg); margin-top: -25px; margin-left: -25px; + opacity: 1; display: inline-block; background-color: #EEE; z-index: 9; position: absolute; outline: 5px solid #EEE; +} +.internals .peers { width: 0px; height: 0px; position: absolute; left: -20px; top: -20px; text-align: center; } +.internals .peer { left: 0px; top: 0px; position: absolute; } +.internals .peer .icon { width: 20px; height: 20px; padding: 10px; display: inline-block; text-decoration: none; left: 200px; position: absolute; color: #666; } +.internals .peer .icon:before { content: "\25BC"; position: absolute; margin-top: 3px; margin-left: -1px; opacity: 0; transition: all 0.3s } +.internals .peer .icon:hover:before { opacity: 1; transition: none } +.internals .peer .line { + width: 187px; border-top: 1px solid #CCC; position: absolute; top: 20px; left: 20px; + transform: rotateZ(334deg); transform-origin: bottom left; +} \ No newline at end of file diff --git a/plugins/Sidebar/media/Menu.coffee b/plugins/Sidebar/media/Menu.coffee new file mode 100644 index 00000000..3e19fd9f --- /dev/null +++ b/plugins/Sidebar/media/Menu.coffee @@ -0,0 +1,49 @@ +class Menu + constructor: (@button) -> + @elem = $(".menu.template").clone().removeClass("template") + @elem.appendTo("body") + @items = [] + + show: -> + if window.visible_menu and window.visible_menu.button[0] == @button[0] # Same menu visible then hide it + window.visible_menu.hide() + @hide() + else + button_pos = @button.offset() + left = button_pos.left + @elem.css({"top": button_pos.top+@button.outerHeight(), "left": left}) + @button.addClass("menu-active") + @elem.addClass("visible") + if @elem.position().left + @elem.width() + 20 > window.innerWidth + @elem.css("left", window.innerWidth - @elem.width() - 20) + if window.visible_menu then window.visible_menu.hide() + window.visible_menu = @ + + + hide: -> + @elem.removeClass("visible") + @button.removeClass("menu-active") + window.visible_menu = null + + + addItem: (title, cb) -> + item = $(".menu-item.template", 
@elem).clone().removeClass("template") + item.html(title) + item.on "click", => + if not cb(item) + @hide() + return false + item.appendTo(@elem) + @items.push item + return item + + + log: (args...) -> + console.log "[Menu]", args... + +window.Menu = Menu + +# Hide menu on outside click +$("body").on "click", (e) -> + if window.visible_menu and e.target != window.visible_menu.button[0] and $(e.target).parent()[0] != window.visible_menu.elem[0] + window.visible_menu.hide() diff --git a/plugins/Sidebar/media/Menu.css b/plugins/Sidebar/media/Menu.css new file mode 100644 index 00000000..e2afa16e --- /dev/null +++ b/plugins/Sidebar/media/Menu.css @@ -0,0 +1,19 @@ +.menu { + background-color: white; padding: 10px 0px; position: absolute; top: 0px; left: 0px; max-height: 0px; overflow: hidden; transform: translate(0px, -30px); pointer-events: none; + box-shadow: 0px 2px 8px rgba(0,0,0,0.3); border-radius: 2px; opacity: 0; transition: opacity 0.2s ease-out, transform 1s ease-out, max-height 0.2s ease-in-out; +} +.menu.visible { opacity: 1; max-height: 350px; transform: translate(0px, 0px); transition: opacity 0.1s ease-out, transform 0.3s ease-out, max-height 0.3s ease-in-out; pointer-events: all } + +.menu-item { display: block; text-decoration: none; color: black; padding: 6px 24px; transition: all 0.2s; border-bottom: none; font-weight: normal; padding-left: 30px; } +.menu-item-separator { margin-top: 5px; border-top: 1px solid #eee } + +.menu-item:hover { background-color: #F6F6F6; transition: none; color: inherit; border: none } +.menu-item:active, .menu-item:focus { background-color: #AF3BFF; color: white; transition: none } +.menu-item.selected:before { + content: "L"; display: inline-block; transform: rotateZ(45deg) scaleX(-1); + font-weight: bold; position: absolute; margin-left: -17px; font-size: 12px; margin-top: 2px; +} + +@media only screen and (max-width: 800px) { +.menu, .menu.visible { position: absolute; left: unset !important; right: 20px; } +} \ No newline at end of file diff --git a/plugins/Sidebar/media/RateLimit.coffee b/plugins/Sidebar/media/RateLimit.coffee new file mode 100644 index 00000000..17c67433 --- /dev/null +++ b/plugins/Sidebar/media/RateLimit.coffee @@ -0,0 +1,14 @@ +limits = {} +call_after_interval = {} +window.RateLimit = (interval, fn) -> + if not limits[fn] + call_after_interval[fn] = false + fn() # First call is not delayed + limits[fn] = setTimeout (-> + if call_after_interval[fn] + fn() + delete limits[fn] + delete call_after_interval[fn] + ), interval + else # Called within iterval, delay the call + call_after_interval[fn] = true diff --git a/plugins/Sidebar/media/Scrollable.js b/plugins/Sidebar/media/Scrollable.js new file mode 100644 index 00000000..689a5719 --- /dev/null +++ b/plugins/Sidebar/media/Scrollable.js @@ -0,0 +1,91 @@ +/* via http://jsfiddle.net/elGrecode/00dgurnn/ */ + +window.initScrollable = function () { + + var scrollContainer = document.querySelector('.scrollable'), + scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'), + scrollContent = document.querySelector('.scrollable .content'), + contentPosition = 0, + scrollerBeingDragged = false, + scroller, + topPosition, + scrollerHeight; + + function calculateScrollerHeight() { + // *Calculation of how tall scroller should be + var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight; + if (visibleRatio == 1) + scroller.style.display = "none"; + else + scroller.style.display = "block"; + return visibleRatio * 
scrollContainer.offsetHeight; + } + + function moveScroller(evt) { + // Move Scroll bar to top offset + var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight; + topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box + scroller.style.top = topPosition + 'px'; + } + + function startDrag(evt) { + normalizedPosition = evt.pageY; + contentPosition = scrollContentWrapper.scrollTop; + scrollerBeingDragged = true; + window.addEventListener('mousemove', scrollBarScroll); + return false; + } + + function stopDrag(evt) { + scrollerBeingDragged = false; + window.removeEventListener('mousemove', scrollBarScroll); + } + + function scrollBarScroll(evt) { + if (scrollerBeingDragged === true) { + evt.preventDefault(); + var mouseDifferential = evt.pageY - normalizedPosition; + var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight); + scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent; + } + } + + function updateHeight() { + scrollerHeight = calculateScrollerHeight() - 10; + scroller.style.height = scrollerHeight + 'px'; + } + + function createScroller() { + // *Creates scroller element and appends to '.scrollable' div + // create scroller element + scroller = document.createElement("div"); + scroller.className = 'scroller'; + + // determine how big scroller should be based on content + scrollerHeight = calculateScrollerHeight() - 10; + + if (scrollerHeight / scrollContainer.offsetHeight < 1) { + // *If there is a need to have scroll bar based on content size + scroller.style.height = scrollerHeight + 'px'; + + // append scroller to scrollContainer div + scrollContainer.appendChild(scroller); + + // show scroll path divot + scrollContainer.className += ' showScroll'; + + // attach related draggable listeners + scroller.addEventListener('mousedown', startDrag); + window.addEventListener('mouseup', stopDrag); + } + + } + + createScroller(); + + + // *** Listeners *** + scrollContentWrapper.addEventListener('scroll', moveScroller); + + return updateHeight; +}; \ No newline at end of file diff --git a/plugins/Sidebar/media/Scrollbable.css b/plugins/Sidebar/media/Scrollbable.css new file mode 100644 index 00000000..6e3e0b6a --- /dev/null +++ b/plugins/Sidebar/media/Scrollbable.css @@ -0,0 +1,44 @@ +.scrollable { + overflow: hidden; +} + +.scrollable.showScroll::after { + position: absolute; + content: ''; + top: 5%; + right: 7px; + height: 90%; + width: 3px; + background: rgba(224, 224, 255, .3); +} + +.scrollable .content-wrapper { + width: 100%; + height: 100%; + padding-right: 50%; + overflow-y: scroll; +} +.scroller { + margin-top: 5px; + z-index: 5; + cursor: pointer; + position: absolute; + width: 7px; + border-radius: 5px; + background: #3A3A3A; + top: 0px; + left: 395px; + -webkit-transition: top .08s; + -moz-transition: top .08s; + -ms-transition: top .08s; + -o-transition: top .08s; + transition: top .08s; +} +.scroller { + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} diff --git a/plugins/Sidebar/media/Sidebar.coffee b/plugins/Sidebar/media/Sidebar.coffee new file mode 100644 index 00000000..ab9f5316 --- /dev/null +++ b/plugins/Sidebar/media/Sidebar.coffee @@ -0,0 +1,616 @@ +class Sidebar extends Class + constructor: (@wrapper) -> + @tag = null + @container = null + @opened = false + 
@width = 410 + @internals = new Internals(@) + @fixbutton = $(".fixbutton") + @fixbutton_addx = 0 + @fixbutton_addy = 0 + @fixbutton_initx = 0 + @fixbutton_inity = 15 + @fixbutton_targetx = 0 + @move_lock = null + @page_width = $(window).width() + @page_height = $(window).height() + @frame = $("#inner-iframe") + @initFixbutton() + @dragStarted = 0 + @globe = null + @preload_html = null + + @original_set_site_info = @wrapper.setSiteInfo # We're going to override this, save the original + + # Start in opened state for debugging + if false + @startDrag() + @moved() + @fixbutton_targetx = @fixbutton_initx - @width + @stopDrag() + + + initFixbutton: -> + + # Detect dragging + @fixbutton.on "mousedown touchstart", (e) => + if e.button > 0 # Right or middle click + return + e.preventDefault() + + # Disable previous listeners + @fixbutton.off "click touchend touchcancel" + + # Make sure it's not a click + @dragStarted = (+ new Date) + + # Fullscreen drag bg to capture mouse events over iframe + $(".drag-bg").remove() + $("<div class='drag-bg'></div>
    ").appendTo(document.body) + + $("body").one "mousemove touchmove", (e) => + mousex = e.pageX + mousey = e.pageY + if not mousex + mousex = e.originalEvent.touches[0].pageX + mousey = e.originalEvent.touches[0].pageY + + @fixbutton_addx = @fixbutton.offset().left - mousex + @fixbutton_addy = @fixbutton.offset().top - mousey + @startDrag() + @fixbutton.parent().on "click touchend touchcancel", (e) => + if (+ new Date) - @dragStarted < 100 + window.top.location = @fixbutton.find(".fixbutton-bg").attr("href") + @stopDrag() + @resized() + $(window).on "resize", @resized + + resized: => + @page_width = $(window).width() + @page_height = $(window).height() + @fixbutton_initx = @page_width - 75 # Initial x position + if @opened + @fixbutton.css + left: @fixbutton_initx - @width + else + @fixbutton.css + left: @fixbutton_initx + + # Start dragging the fixbutton + startDrag: -> + @move_lock = "x" # Temporary until internals not finished + @log "startDrag" + @fixbutton_targetx = @fixbutton_initx # Fallback x position + + @fixbutton.addClass("dragging") + + # IE position wrap fix + if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0 + @fixbutton.css("pointer-events", "none") + + # Don't go to homepage + @fixbutton.one "click", (e) => + @stopDrag() + @fixbutton.removeClass("dragging") + moved_x = Math.abs(@fixbutton.offset().left - @fixbutton_initx) + moved_y = Math.abs(@fixbutton.offset().top - @fixbutton_inity) + if moved_x > 5 or moved_y > 10 + # If moved more than some pixel the button then don't go to homepage + e.preventDefault() + + # Animate drag + @fixbutton.parents().on "mousemove touchmove", @animDrag + @fixbutton.parents().on "mousemove touchmove" ,@waitMove + + # Stop dragging listener + @fixbutton.parents().one "mouseup touchend touchcancel", (e) => + e.preventDefault() + @stopDrag() + + + # Wait for moving the fixbutton + waitMove: (e) => + document.body.style.perspective = "1000px" + document.body.style.height = "100%" + document.body.style.willChange = "perspective" + document.documentElement.style.height = "100%" + #$(document.body).css("backface-visibility", "hidden").css("perspective", "1000px").css("height", "900px") + # $("iframe").css("backface-visibility", "hidden") + + moved_x = Math.abs(parseInt(@fixbutton[0].style.left) - @fixbutton_targetx) + moved_y = Math.abs(parseInt(@fixbutton[0].style.top) - @fixbutton_targety) + if moved_x > 5 and (+ new Date) - @dragStarted + moved_x > 50 + @moved("x") + @fixbutton.stop().animate {"top": @fixbutton_inity}, 1000 + @fixbutton.parents().off "mousemove touchmove" ,@waitMove + + else if moved_y > 5 and (+ new Date) - @dragStarted + moved_y > 50 + @moved("y") + @fixbutton.parents().off "mousemove touchmove" ,@waitMove + + moved: (direction) -> + @log "Moved", direction + @move_lock = direction + if direction == "y" + $(document.body).addClass("body-internals") + return @internals.createHtmltag() + @createHtmltag() + $(document.body).addClass("body-sidebar") + @container.on "mousedown touchend touchcancel", (e) => + if e.target != e.currentTarget + return true + @log "closing" + if $(document.body).hasClass("body-sidebar") + @close() + return true + + $(window).off "resize" + $(window).on "resize", => + $(document.body).css "height", $(window).height() + @scrollable() + @resized() + + # Override setsiteinfo to catch changes + @wrapper.setSiteInfo = (site_info) => + @setSiteInfo(site_info) + @original_set_site_info.apply(@wrapper, arguments) + + # Preload world.jpg + img = new Image(); + img.src = 
"/uimedia/globe/world.jpg"; + + setSiteInfo: (site_info) -> + RateLimit 1500, => + @updateHtmlTag() + RateLimit 30000, => + @displayGlobe() + + # Create the sidebar html tag + createHtmltag: -> + @when_loaded = $.Deferred() + if not @container + @container = $(""" + + """) + @container.appendTo(document.body) + @tag = @container.find(".sidebar") + @updateHtmlTag() + @scrollable = window.initScrollable() + + + updateHtmlTag: -> + if @preload_html + @setHtmlTag(@preload_html) + @preload_html = null + else + @wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag + + setHtmlTag: (res) => + if @tag.find(".content").children().length == 0 # First update + @log "Creating content" + @container.addClass("loaded") + morphdom(@tag.find(".content")[0], '
<div class="content">'+res+'</div>
    ') + # @scrollable() + @when_loaded.resolve() + + else # Not first update, patch the html to keep unchanged dom elements + morphdom @tag.find(".content")[0], '
<div class="content">'+res+'</div>
    ', { + onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state + if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0 + return false + else + return true + } + + # Save and forgot privatekey for site signing + @tag.find("#privatekey-add").off("click, touchend").on "click touchend", (e) => + @wrapper.displayPrompt "Enter your private key:", "password", "Save", "", (privatekey) => + @wrapper.ws.cmd "userSetSitePrivatekey", [privatekey], (res) => + @wrapper.notifications.add "privatekey", "done", "Private key saved for site signing", 5000 + return false + + @tag.find("#privatekey-forgot").off("click, touchend").on "click touchend", (e) => + @wrapper.displayConfirm "Remove saved private key for this site?", "Forgot", (res) => + if not res + return false + @wrapper.ws.cmd "userSetSitePrivatekey", [""], (res) => + @wrapper.notifications.add "privatekey", "done", "Saved private key removed", 5000 + return false + + + + animDrag: (e) => + mousex = e.pageX + mousey = e.pageY + if not mousex and e.originalEvent.touches + mousex = e.originalEvent.touches[0].pageX + mousey = e.originalEvent.touches[0].pageY + + overdrag = @fixbutton_initx - @width - mousex + if overdrag > 0 # Overdragged + overdrag_percent = 1 + overdrag/300 + mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent) + targetx = @fixbutton_initx - mousex - @fixbutton_addx + targety = @fixbutton_inity - mousey - @fixbutton_addy + + if @move_lock == "x" + targety = @fixbutton_inity + else if @move_lock == "y" + targetx = @fixbutton_initx + + if not @move_lock or @move_lock == "x" + @fixbutton[0].style.left = (mousex + @fixbutton_addx) + "px" + if @tag + @tag[0].style.transform = "translateX(#{0 - targetx}px)" + + if not @move_lock or @move_lock == "y" + @fixbutton[0].style.top = (mousey + @fixbutton_addy) + "px" + if @internals.tag + @internals.tag[0].style.transform = "translateY(#{0 - targety}px)" + + #if @move_lock == "x" + # @fixbutton[0].style.left = "#{@fixbutton_targetx} px" + #@fixbutton[0].style.top = "#{@fixbutton_inity}px" + #if @move_lock == "y" + # @fixbutton[0].style.top = "#{@fixbutton_targety} px" + + # Check if opened + if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9) + @fixbutton_targetx = @fixbutton_initx - @width # Make it opened + else + @fixbutton_targetx = @fixbutton_initx + + if (not @internals.opened and 0 - targety > @page_height/10) or (@internals.opened and 0 - targety > @page_height*0.95) + @fixbutton_targety = @page_height - @fixbutton_inity - 50 + else + @fixbutton_targety = @fixbutton_inity + + + # Stop dragging the fixbutton + stopDrag: -> + @fixbutton.parents().off "mousemove touchmove" + @fixbutton.off "mousemove touchmove" + @fixbutton.css("pointer-events", "") + $(".drag-bg").remove() + if not @fixbutton.hasClass("dragging") + return + @fixbutton.removeClass("dragging") + + # Move back to initial position + if @fixbutton_targetx != @fixbutton.offset().left + # Animate fixbutton + if @move_lock == "y" + top = @fixbutton_targety + left = @fixbutton_initx + if @move_lock == "x" + top = @fixbutton_inity + left = @fixbutton_targetx + @fixbutton.stop().animate {"left": left, "top": top}, 500, "easeOutBack", => + # Switch back to auto align + if @fixbutton_targetx == @fixbutton_initx # Closed + @fixbutton.css("left", "auto") + else # Opened + @fixbutton.css("left", left) + + $(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status + + @stopDragX() + @internals.stopDragY() + @move_lock = null + 
+ stopDragX: -> + # Animate sidebar and iframe + if @fixbutton_targetx == @fixbutton_initx or @move_lock == "y" + # Closed + targetx = 0 + @opened = false + else + # Opened + targetx = @width + if @opened + @onOpened() + else + @when_loaded.done => + @onOpened() + @opened = true + + # Revent sidebar transitions + if @tag + @tag.css("transition", "0.4s ease-out") + @tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, => + @tag.css("transition", "") + if not @opened + @container.remove() + @container = null + if @tag + @tag.remove() + @tag = null + + # Revert body transformations + @log "stopdrag", "opened:", @opened + if not @opened + @onClosed() + + sign: (inner_path, privatekey) -> + @wrapper.displayProgress("sign", "Signing: #{inner_path}...", 0) + @wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) => + if res == "ok" + @wrapper.displayProgress("sign", "#{inner_path} signed!", 100) + else + @wrapper.displayProgress("sign", "Error signing #{inner_path}", -1) + + publish: (inner_path, privatekey) -> + @wrapper.ws.cmd "sitePublish", {privatekey: privatekey, inner_path: inner_path, sign: true, update_changed_files: true}, (res) => + if res == "ok" + @wrapper.notifications.add "sign", "done", "#{inner_path} Signed and published!", 5000 + + onOpened: -> + @log "Opened" + @scrollable() + + # Re-calculate height when site admin opened or closed + @tag.find("#checkbox-owned, #checkbox-autodownloadoptional").off("click touchend").on "click touchend", => + setTimeout (=> + @scrollable() + ), 300 + + # Site limit button + @tag.find("#button-sitelimit").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), (res) => + if res == "ok" + @wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000 + @updateHtmlTag() + return false + + # Site autodownload limit button + @tag.find("#button-autodownload_bigfile_size_limit").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetAutodownloadBigfileLimit", $("#input-autodownload_bigfile_size_limit").val(), (res) => + if res == "ok" + @wrapper.notifications.add "done-bigfilelimit", "done", "Site bigfile auto download limit modified!", 5000 + @updateHtmlTag() + return false + + # Database reload + @tag.find("#button-dbreload").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "dbReload", [], => + @wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000 + @updateHtmlTag() + return false + + # Database rebuild + @tag.find("#button-dbrebuild").off("click touchend").on "click touchend", => + @wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...." 
+ @wrapper.ws.cmd "dbRebuild", [], => + @wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000 + @updateHtmlTag() + return false + + # Update site + @tag.find("#button-update").off("click touchend").on "click touchend", => + @tag.find("#button-update").addClass("loading") + @wrapper.ws.cmd "siteUpdate", @wrapper.site_info.address, => + @wrapper.notifications.add "done-updated", "done", "Site updated!", 5000 + @tag.find("#button-update").removeClass("loading") + return false + + # Pause site + @tag.find("#button-pause").off("click touchend").on "click touchend", => + @tag.find("#button-pause").addClass("hidden") + @wrapper.ws.cmd "sitePause", @wrapper.site_info.address + return false + + # Resume site + @tag.find("#button-resume").off("click touchend").on "click touchend", => + @tag.find("#button-resume").addClass("hidden") + @wrapper.ws.cmd "siteResume", @wrapper.site_info.address + return false + + # Delete site + @tag.find("#button-delete").off("click touchend").on "click touchend", => + @wrapper.displayConfirm "Are you sure?", ["Delete this site", "Blacklist"], (confirmed) => + if confirmed == 1 + @tag.find("#button-delete").addClass("loading") + @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, -> + document.location = $(".fixbutton-bg").attr("href") + else if confirmed == 2 + @wrapper.displayPrompt "Blacklist this site", "text", "Delete and Blacklist", "Reason", (reason) => + @tag.find("#button-delete").addClass("loading") + @wrapper.ws.cmd "siteblockAdd", [@wrapper.site_info.address, reason] + @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, -> + document.location = $(".fixbutton-bg").attr("href") + + + return false + + # Owned checkbox + @tag.find("#checkbox-owned").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] + + # Owned checkbox + @tag.find("#checkbox-autodownloadoptional").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")] + + # Change identity button + @tag.find("#button-identity").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "certSelect" + return false + + # Save settings + @tag.find("#button-settings").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "fileGet", "content.json", (res) => + data = JSON.parse(res) + data["title"] = $("#settings-title").val() + data["description"] = $("#settings-description").val() + json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t'))) + @wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) => + if res != "ok" # fileWrite failed + @wrapper.notifications.add "file-write", "error", "File write error: #{res}" + else + @wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000 + if @wrapper.site_info.privatekey + @wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: "content.json", update_changed_files: true} + @updateHtmlTag() + return false + + + # Open site directory + @tag.find("#link-directory").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "serverShowdirectory", ["site", @wrapper.site_info.address] + return false + + # Copy site with peers + @tag.find("#link-copypeers").off("click touchend").on "click touchend", (e) => + copy_text = e.currentTarget.href + handler = (e) => + e.clipboardData.setData('text/plain', copy_text) + e.preventDefault() + @wrapper.notifications.add "copy", "done", "Site 
address with peers copied to your clipboard", 5000 + document.removeEventListener('copy', handler, true) + + document.addEventListener('copy', handler, true) + document.execCommand('copy') + return false + + # Sign and publish content.json + $(document).on "click touchend", => + @tag?.find("#button-sign-publish-menu").removeClass("visible") + @tag?.find(".contents + .flex").removeClass("sign-publish-flex") + + @tag.find(".contents-content").off("click touchend").on "click touchend", (e) => + $("#input-contents").val(e.currentTarget.innerText); + return false; + + menu = new Menu(@tag.find("#menu-sign-publish")) + menu.elem.css("margin-top", "-130px") # Open upwards + menu.addItem "Sign", => + inner_path = @tag.find("#input-contents").val() + + @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (rules) => + if @wrapper.site_info.auth_address in rules.signers + # ZeroID or other ID provider + @sign(inner_path) + else if @wrapper.site_info.privatekey + # Privatekey stored in users.json + @sign(inner_path, "stored") + else + # Ask the user for privatekey + @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key + @sign(inner_path, privatekey) + + @tag.find(".contents + .flex").removeClass "active" + menu.hide() + + menu.addItem "Publish", => + inner_path = @tag.find("#input-contents").val() + @wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false} + + @tag.find(".contents + .flex").removeClass "active" + menu.hide() + + @tag.find("#menu-sign-publish").off("click touchend").on "click touchend", => + if window.visible_menu == menu + @tag.find(".contents + .flex").removeClass "active" + menu.hide() + else + @tag.find(".contents + .flex").addClass "active" + @tag.find(".content-wrapper").prop "scrollTop", 10000 + menu.show() + return false + + $("body").on "click", => + if @tag + @tag.find(".contents + .flex").removeClass "active" + + @tag.find("#button-sign-publish").off("click touchend").on "click touchend", => + inner_path = @tag.find("#input-contents").val() + + @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (rules) => + if @wrapper.site_info.auth_address in rules.signers + # ZeroID or other ID provider + @publish(inner_path, null) + else if @wrapper.site_info.privatekey + # Privatekey stored in users.json + @publish(inner_path, "stored") + else + # Ask the user for privatekey + @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key + @publish(inner_path, privatekey) + return false + + # Close + @tag.find(".close").off("click touchend").on "click touchend", (e) => + @close() + return false + + @loadGlobe() + + close: -> + @move_lock = "x" + @startDrag() + @stopDrag() + + + onClosed: -> + $(window).off "resize" + $(window).on "resize", @resized + $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) => + if e.target == document.body and not $(document.body).hasClass("body-sidebar") and not $(document.body).hasClass("body-internals") + $(document.body).css("height", "auto").css("perspective", "").css("will-change", "").css("transition", "").off transitionEnd + @unloadGlobe() + + # We dont need site info anymore + @wrapper.setSiteInfo = @original_set_site_info + + + loadGlobe: => + if @tag.find(".globe").hasClass("loading") + setTimeout (=> + if typeof(DAT) == "undefined" # Globe script not loaded, do it first + script_tag = $(" + + diff --git a/plugins/UiConfig/media/css/Config.css 
b/plugins/UiConfig/media/css/Config.css new file mode 100644 index 00000000..98291d33 --- /dev/null +++ b/plugins/UiConfig/media/css/Config.css @@ -0,0 +1,68 @@ +body { background-color: #EDF2F5; font-family: Roboto, 'Segoe UI', Arial, 'Helvetica Neue'; margin: 0px; padding: 0px; backface-visibility: hidden; } +h1, h2, h3, h4 { font-family: 'Roboto', Arial, sans-serif; font-weight: 200; font-size: 30px; margin: 0px; padding: 0px } +h2 { margin-top: 10px; } +h3 { font-weight: normal } +h1 { background: linear-gradient(33deg,#af3bff,#0d99c9); color: white; padding: 16px 30px; } +a { color: #9760F9 } +a:hover { text-decoration: none } + +.link { background-color: transparent; outline: 5px solid transparent; transition: all 0.3s } +.link:active { background-color: #EFEFEF; outline: 5px solid #EFEFEF; transition: none } + +.content { max-width: 800px; margin: auto; background-color: white; padding: 60px 20px; box-sizing: border-box; padding-bottom: 150px; } +.section { margin: 0px 10%; } +.config-items { font-size: 19px; margin-top: 25px; margin-bottom: 75px; } +.config-item { transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); position: relative; padding-bottom: 20px; padding-top: 10px; } +.config-item.hidden { opacity: 0; height: 0px; padding: 0px; } +.config-item .title { display: inline-block; line-height: 36px; } +.config-item .title h3 { font-size: 20px; font-weight: lighter; margin-right: 100px; } +.config-item .description { font-size: 14px; color: #666; line-height: 24px; } +.config-item .value { display: inline-block; white-space: nowrap; } +.config-item .value-right { right: 0px; position: absolute; } +.config-item .value-fullwidth { width: 100% } +.config-item .marker { + font-weight: bold; text-decoration: none; font-size: 25px; position: absolute; padding: 2px 15px; line-height: 32px; + opacity: 0; pointer-events: none; transition: all 0.6s; transform: scale(2); color: #9760F9; +} +.config-item .marker.visible { opacity: 1; pointer-events: all; transform: scale(1); } +.config-item .marker.changed { color: #2ecc71; } +.config-item .marker.pending { color: #ffa200; } + + +.input-text, .input-select { padding: 8px 18px; border: 1px solid #CCC; border-radius: 3px; font-size: 17px; box-sizing: border-box; } +.input-text:focus, .input-select:focus { border: 1px solid #3396ff; outline: none; } +.input-textarea { overflow-x: auto; overflow-y: hidden; white-space: pre; line-height: 22px; } + +.input-select { width: initial; font-size: 14px; padding-right: 10px; padding-left: 10px; } + +.value-right .input-text { text-align: right; width: 100px; } +.value-fullwidth .input-text { width: 100%; font-size: 14px; font-family: 'Segoe UI', Arial, 'Helvetica Neue'; } +.value-fullwidth { margin-top: 10px; } + +/* Checkbox */ +.checkbox-skin { background-color: #CCC; width: 50px; height: 24px; border-radius: 15px; transition: all 0.3s ease-in-out; display: inline-block; } +.checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; border-radius: 100%; margin-top: 2px; margin-left: 2px; + transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); +} +.checkbox { font-size: 14px; font-weight: normal; display: inline-block; cursor: pointer; margin-top: 5px; } +.checkbox .title { display: inline; line-height: 30px; vertical-align: 4px; margin-left: 11px } +.checkbox.checked .checkbox-skin:before { margin-left: 27px; } +.checkbox.checked .checkbox-skin { background-color: #2ECC71 } + +/* Bottom */ + +.bottom { + width: 100%; 
text-align: center; background-color: #ffffffde; padding: 25px; bottom: -120px; + transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); position: fixed; backface-visibility: hidden; box-sizing: border-box; +} +.bottom-content { max-width: 750px; width: 100%; margin: 0px auto; } +.bottom .button { float: right; } +.bottom.visible { bottom: 0px; box-shadow: 0px 0px 35px #dcdcdc; } +.bottom .title { padding: 10px 10px; color: #363636; float: left; text-transform: uppercase; letter-spacing: 1px; } +.bottom .title:before { content: "•"; display: inline-block; color: #2ecc71; font-size: 31px; vertical-align: -7px; margin-right: 8px; line-height: 25px; } +.bottom-restart .title:before { color: #ffa200; } + +.animate { transition: all 0.3s ease-out !important; } +.animate-back { transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; } +.animate-inout { transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; } \ No newline at end of file diff --git a/plugins/UiConfig/media/css/all.css b/plugins/UiConfig/media/css/all.css new file mode 100644 index 00000000..7bb0087a --- /dev/null +++ b/plugins/UiConfig/media/css/all.css @@ -0,0 +1,125 @@ + + +/* ---- plugins/UiConfig/media/css/Config.css ---- */ + + +body { background-color: #EDF2F5; font-family: Roboto, 'Segoe UI', Arial, 'Helvetica Neue'; margin: 0px; padding: 0px; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; } +h1, h2, h3, h4 { font-family: 'Roboto', Arial, sans-serif; font-weight: 200; font-size: 30px; margin: 0px; padding: 0px } +h2 { margin-top: 10px; } +h3 { font-weight: normal } +h1 { background: -webkit-linear-gradient(33deg,#af3bff,#0d99c9);background: -moz-linear-gradient(33deg,#af3bff,#0d99c9);background: -o-linear-gradient(33deg,#af3bff,#0d99c9);background: -ms-linear-gradient(33deg,#af3bff,#0d99c9);background: linear-gradient(33deg,#af3bff,#0d99c9); color: white; padding: 16px 30px; } +a { color: #9760F9 } +a:hover { text-decoration: none } + +.link { background-color: transparent; outline: 5px solid transparent; -webkit-transition: all 0.3s ; -moz-transition: all 0.3s ; -o-transition: all 0.3s ; -ms-transition: all 0.3s ; transition: all 0.3s } +.link:active { background-color: #EFEFEF; outline: 5px solid #EFEFEF; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none } + +.content { max-width: 800px; margin: auto; background-color: white; padding: 60px 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; padding-bottom: 150px; } +.section { margin: 0px 10%; } +.config-items { font-size: 19px; margin-top: 25px; margin-bottom: 75px; } +.config-item { -webkit-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -moz-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -o-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -ms-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1) ; position: relative; padding-bottom: 20px; padding-top: 10px; } +.config-item.hidden { opacity: 0; height: 0px; padding: 0px; } +.config-item .title { display: inline-block; line-height: 36px; } +.config-item .title h3 { font-size: 20px; font-weight: lighter; margin-right: 100px; } +.config-item .description { font-size: 14px; color: #666; line-height: 24px; } +.config-item .value { display: inline-block; white-space: 
nowrap; } +.config-item .value-right { right: 0px; position: absolute; } +.config-item .value-fullwidth { width: 100% } +.config-item .marker { + font-weight: bold; text-decoration: none; font-size: 25px; position: absolute; padding: 2px 15px; line-height: 32px; + opacity: 0; pointer-events: none; -webkit-transition: all 0.6s; -moz-transition: all 0.6s; -o-transition: all 0.6s; -ms-transition: all 0.6s; transition: all 0.6s ; -webkit-transform: scale(2); -moz-transform: scale(2); -o-transform: scale(2); -ms-transform: scale(2); transform: scale(2) ; color: #9760F9; +} +.config-item .marker.visible { opacity: 1; pointer-events: all; -webkit-transform: scale(1); -moz-transform: scale(1); -o-transform: scale(1); -ms-transform: scale(1); transform: scale(1) ; } +.config-item .marker.changed { color: #2ecc71; } +.config-item .marker.pending { color: #ffa200; } + + +.input-text, .input-select { padding: 8px 18px; border: 1px solid #CCC; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; font-size: 17px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; } +.input-text:focus, .input-select:focus { border: 1px solid #3396ff; outline: none; } +.input-textarea { overflow-x: auto; overflow-y: hidden; white-space: pre; line-height: 22px; } + +.input-select { width: initial; font-size: 14px; padding-right: 10px; padding-left: 10px; } + +.value-right .input-text { text-align: right; width: 100px; } +.value-fullwidth .input-text { width: 100%; font-size: 14px; font-family: 'Segoe UI', Arial, 'Helvetica Neue'; } +.value-fullwidth { margin-top: 10px; } + +/* Checkbox */ +.checkbox-skin { background-color: #CCC; width: 50px; height: 24px; -webkit-border-radius: 15px; -moz-border-radius: 15px; -o-border-radius: 15px; -ms-border-radius: 15px; border-radius: 15px ; -webkit-transition: all 0.3s ease-in-out; -moz-transition: all 0.3s ease-in-out; -o-transition: all 0.3s ease-in-out; -ms-transition: all 0.3s ease-in-out; transition: all 0.3s ease-in-out ; display: inline-block; } +.checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; -webkit-border-radius: 100%; -moz-border-radius: 100%; -o-border-radius: 100%; -ms-border-radius: 100%; border-radius: 100% ; margin-top: 2px; margin-left: 2px; + -webkit-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -moz-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -o-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -ms-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86) ; +} +.checkbox { font-size: 14px; font-weight: normal; display: inline-block; cursor: pointer; margin-top: 5px; } +.checkbox .title { display: inline; line-height: 30px; vertical-align: 4px; margin-left: 11px } +.checkbox.checked .checkbox-skin:before { margin-left: 27px; } +.checkbox.checked .checkbox-skin { background-color: #2ECC71 } + +/* Bottom */ + +.bottom { + width: 100%; text-align: center; background-color: #ffffffde; padding: 25px; bottom: -120px; + -webkit-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -moz-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -o-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -ms-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1) ; position: fixed; 
-webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; +} +.bottom-content { max-width: 750px; width: 100%; margin: 0px auto; } +.bottom .button { float: right; } +.bottom.visible { bottom: 0px; -webkit-box-shadow: 0px 0px 35px #dcdcdc; -moz-box-shadow: 0px 0px 35px #dcdcdc; -o-box-shadow: 0px 0px 35px #dcdcdc; -ms-box-shadow: 0px 0px 35px #dcdcdc; box-shadow: 0px 0px 35px #dcdcdc ; } +.bottom .title { padding: 10px 10px; color: #363636; float: left; text-transform: uppercase; letter-spacing: 1px; } +.bottom .title:before { content: "•"; display: inline-block; color: #2ecc71; font-size: 31px; vertical-align: -7px; margin-right: 8px; line-height: 25px; } +.bottom-restart .title:before { color: #ffa200; } + +.animate { -webkit-transition: all 0.3s ease-out !important; -moz-transition: all 0.3s ease-out !important; -o-transition: all 0.3s ease-out !important; -ms-transition: all 0.3s ease-out !important; transition: all 0.3s ease-out !important ; } +.animate-back { -webkit-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -moz-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -o-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -ms-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important ; } +.animate-inout { -webkit-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -moz-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -o-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -ms-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important ; } + + +/* ---- plugins/UiConfig/media/css/button.css ---- */ + + +/* Button */ +.button { + background-color: #FFDC00; color: black; padding: 10px 20px; display: inline-block; background-position: left center; + -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; border-bottom: 2px solid #E8BE29; -webkit-transition: all 0.5s ease-out; -moz-transition: all 0.5s ease-out; -o-transition: all 0.5s ease-out; -ms-transition: all 0.5s ease-out; transition: all 0.5s ease-out ; text-decoration: none; +} +.button:hover { border-color: white; border-bottom: 2px solid #BD960C; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none ; background-color: #FDEB07 } +.button:active { position: relative; top: 1px } +.button.loading { + color: rgba(0,0,0,0); background: #999 url(../img/loading.gif) no-repeat center center; + -webkit-transition: all 0.5s ease-out ; -moz-transition: all 0.5s ease-out ; -o-transition: all 0.5s ease-out ; -ms-transition: all 0.5s ease-out ; transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666 +} +.button.disabled { color: #DDD; background-color: #999; pointer-events: none; border-bottom: 2px solid #666 } + + +/* ---- plugins/UiConfig/media/css/fonts.css ---- */ + + +/* Base64 encoder: http://www.motobit.com/util/base64-decoder-encoder.asp */ +/* Generated by Font Squirrel (http://www.fontsquirrel.com) on January 21, 2015 */ + + +@font-face { + font-family: 'Roboto'; + font-style: normal; + 
font-weight: 400; + src: + local('Roboto'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAGfcABIAAAAAx5wAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHREVGAAABlAAAAEcAAABYB30Hd0dQT1MAAAHcAAAH8AAAFLywggk9R1NVQgAACcwAAACmAAABFMK7zVBPUy8yAAAKdAAAAFYAAABgoKexpmNtYXAAAArMAAADZAAABnjIFMucY3Z0IAAADjAAAABMAAAATCRBBuVmcGdtAAAOfAAAATsAAAG8Z/Rcq2dhc3AAAA+4AAAADAAAAAwACAATZ2x5ZgAAD8QAAE7fAACZfgdaOmpoZG14AABepAAAAJoAAAGo8AnZfGhlYWQAAF9AAAAANgAAADb4RqsOaGhlYQAAX3gAAAAgAAAAJAq6BzxobXR4AABfmAAAA4cAAAZwzpCM0GxvY2EAAGMgAAADKQAAAzowggjbbWF4cAAAZkwAAAAgAAAAIAPMAvluYW1lAABmbAAAAJkAAAEQEG8sqXBvc3QAAGcIAAAAEwAAACD/bQBkcHJlcAAAZxwAAAC9AAAA23Sgj+x4AQXBsQFBMQAFwHvRZg0bgEpnDXukA4AWYBvqv9O/E1RAUQ3NxcJSNM3A2lpsbcXBQZydxdVdPH3Fz1/RZSyZ5Ss9lqEL+AB4AWSOA4ydQRgAZ7a2bdu2bdu2bduI07hubF2s2gxqxbX+p7anzO5nIZCfkawkZ8/eA0dSfsa65QupPWf5rAU0Xzht5WI6kxMgihAy2GawQwY7BzkXzFq+mPLZJSAkO0NyVuEchXPXzjMfTU3eEJqGpv4IV0LrMD70DITBYWTcyh0Wh6LhdEgLR8O5UD3+U0wNP+I0/cv4OIvjvRlpHZ+SYvx/0uKd2YlP+t+TJHnBuWz/XPKmJP97x2f4U5MsTpC8+Efi6iSn46Qi58KVhP73kQ3kpgAlqEUd6lKP+jShKS1oSVva04FOdKYf/RnIMIYzgtGMZxLnucAlLnON69zkNne4yz3u84CHPOIxT3jKM17wkle85g0f+cwXvvKN3/whEjWYx7zms4CFLGIxS1jKMpazvBWsaCUrW8WqVrO6DW1vRzvb1e72so/97O8ABzrIwQ5xqMMd6WinOcNZrnCVq13jWte70e3udLd73edBD3nEox7zuCc8iZSIqiKjo9cExlKYbdEZclKIknQjRik9xkmSNHEc/9fY01Nr27Zt27Zt294HZ9u2bWttjGc1OHXc70Wt+tQb9fl2dkZmRuTUdBL5ExrDewn1Mq6YsX+YYkWOU23sksZYFqe7WqaGWapYtXfEp90vh3pH2dlViVSvy7kkRSnM9lH5BXZ8pBn+l7XcKrOvhzbaTm2xe8RZOy1uwak2imNvGn0TyD9qT5MvZ+9pMD2HUfsWy2QlhntyQyXYV+KW3CWVU/s0mJEba4Y9SZcv6HI3Xd6hy9t6yr6jYlfOOSpMVSlSVdVcC51jIVX5Df2ffCT5OLIN1FCt1JVZY9vnjME4TKBDgprStxk9W6ig0lXQmSfXWcC4CGv5vh4bsZn5LuzBf9g7VD4rKBcVbKBq+vPUmEod7Ig6WZo6owu6oR8GYIilaqglawT+w/xm3EruMWo8iW+p8x2+xw/4ET9hHzKom4ksnMN5XMBFXKJONnKQizz4YZbmCA5CEGqpThjCEYFIS3aiEG0DnRg74sQyxjHGMyYw+jjjIj8KojCKojhKojTKojwqojKqorE/z+nO2BO9MUb5nXGYgMn0nYrpmInZmIuF3GMLdtB7J713830v/mvJctXYflBTO6Vmlq4Wdljpdpj/4g/OOEzAPEt3FpBbhLV8X4+N2Mx8F/bgP5yLp9LTVMqgytdU+ZoqTzvjMAELmC/CZuzCHvyHffGqaZlqgmSkIBVpluk0xiRMwTTMwCzMYb20IuRTLDpZsjqjC7phAP6Dm/EI64/icTyBS+SykYNc5PEOfHCRHwVRGEVRHCVRGmVRHhVRGVU56yi/wiSFq6y261m9r1/kMOulwRqmUfQtyt3S1Rld0A0D8B/cjEvIRg5ykccb9cFFfhREYRRFcZREaZRFeVREZVTlbLT68emHkREchKA7eqI3a2Hy2Xq5eAxPgndPvgmSkYJUpLG/MSZhCqZhBmZhDuuuuqu0eqE3+tlqDbLd8jOarXYEByHojp7ojcG22xmK4RiJ0ZwJCe/NrRSxN/pFFVdhyb60bMuyzXbJXrNVlq04e8TuVVBhp0VYsn0S5P6T3nhKrpKCrp9qP1gan7daSjD1/znsjDdmSMpvWQGrZAMyL3Nbwu5Qonx2j70vH+MzZCqKrD1nhe0/ds522Xbzkdlnx6+5e0pgd7x9bdaW2Vv2qf9pyeb4M+x7xj6WpHz6u0gEYRevq7vQjvtftzNXs5aNxvqbsNS/XcmmBmHfev8pgvEFlML3OHh1nfG4nRVhaVc+EwL+XnZek0m3k3Y341tKUpLttxNy5dq9ircaImsp9rnt432+ZB+y70rwVqlsGd7sB2wQWbwvwo56K6fpefU+3n7Fw8teH3ZehL2hGwrLvrGddvL6ftLfzb23f0E3FHazgguvny2+Mj8XsJ721786zgWE/Q8XFfh3uJB8lq6AsA3IuDLbF7Dq7Q8i6907+Ky4q7133XyzN34gr4t9aU9fsz5QwUWIGiiCR4rlceTjCZHLE6oKqqIwVVd9RauxWpLroE4qoi48xdWdp4T6qL9KaiBPWQ3lKafhGqny2srzB6PljBAAAEbh9+U6QJyybXPPWLJt27bdmK8SLpPtsd/zr/dcdaRzuX3weR9dvqmfrnUrfz1hoBxMsVIeNjioHk+81YkvvurBH3/1Ekig+ggmWP2EEaYBIojQIFFEaYgYYjRMHHEaIYEEjZJEisZII03LZJChFbLI0iqFFGqNYoq1Timl2qCccm1SSaW2qKZa29RSqx3qqdcujTRqj2aatU8rvTpgiCEdMcKIjhljTCdMMKlTplnRuZAJ87LVl/yp7D78f4KMZCjjr5kYyEKmMvuoDGWu19rpAlV6GACA8Lf19Xp/uf89XyA0hH1uM0wcJ5HGydnNxdVdTm80YAKznTm4GLGJrPgTxr9+h9F3+Bf8L47foQzSeKRSixbJMnkSverlDibRndmS3FmD9KnKIK9EbXrWI4U55Fmc0KJ7qDDvBUtLii3rOU3W6ZVuuFpDd39TO7dYekVhRi/sUvGPVHbSys0Y+ggXFJDmjbSPzVqlk8bV2V3Ogl4QocQUrEM9VnQOGMJ49FMU79z28lXnNcZgFbzF8Yf+6UVu4TnPf8vZIrdP7kzqZCd6CF4sqUIvzys9f/cam9eY9oKFOpUzW5/Vkip1L9bg7BC6O6agQJOKr2BysQi7vSdc5EV5eAFNizNiBAEYhb/3T+ykje1U08RsYtu2c5X4Nrv3Wo+a54eAErb4Qg+nH08UUUfe4vJCE21Lk1tN9K0tLzbhbmyuNTECySQCj81jx+M8j0X+w+3
1KU1Z7Hp4Pn9gIItuFocAwyEPkIdk0SD3p4wyWpjhCAGiCFGAIUz7OghSo4I8/ehXf/pH5KlcFWpUE3nBr8/jPGIYi5GmJmjiGCsIMZcC7Q8igwAAeAE1xTcBwlAABuEvvYhI0cDGxJYxqHg2mNhZ6RawggOE0Ntf7iTpMlrJyDbZhKj9OjkLMWL/XNSPuX6BHoZxHMx43HJ3QrGJdaIjpNPspNOJn5pGDpMAAHgBhdIDsCRJFIXhcxpjm7U5tm3bCK5tKzS2bdu2bdszNbb5mHveZq1CeyO+/tu3u6oAhAN5dMugqYDQXERCAwF8hbqIojiAtOiMqViIRdiC3TiCW3iMRKZnRhZiEZZlB77Pz9mZXTiEwzmNS/mENpQ7VCW0O3Q+dNGjV8fr5T33YkwWk8t4Jr+pbhqaX8xMM98sNMvMerMpfyZrodEuo13TtGsxtmIPjuI2nsAyAzOxMIuyHDvyA34R7JrKJdoVG8rx9y54tb2u3jPvhclscpg82lXtz10zzGyzQLvWmY1Ju0D7yt5ACbsdb9ltADJJWkkpySUK2ASxNqtNZiOJrxPv2fHQJH6ScDphd8Lu64Out7oeujb62gR/pD/MH+oP8n/3v/PrAH56SeWH/dDlxSD+O+/IZzJU5v/LA/nX6PEr/N9cdP6e4ziBkziF0ziDbjiMa7iOG7iJW7iN7uiBO7iLe7iv7+6JXniIR3iMJ3iKZ+iNPkhAIixBMoS+6McwI4wyGZOjPw5xFAbgCAayMquwKquxOmtgEGuyFmuzDuuyHuuzAQZjCBuyERuzCZuyGZvrfw5jC7ZkK7ZmG7bFcIzg+/yAH/MTfsrPcBTHcBbPqauHXdmN7/I9fsiPOAYrORrrkQaa8FG4aSvBgJI2EBYjnSUiUwMHZJoslI9lUeCgLJYt8r1slV1yXHYHuskeOSLn5GjgsByT03JNzshZ6S7n5JLckctyRXqKLzflodwK9Jbb8lheyJNAH3kqryRBXssb6Ssx7jmG1cRAf7EA00sKyeDgkJoxMEoySSHJKYUdDFCLODiiFpWyUkrKORiolpcqUlmqOhikVpO6UlPqSX0Ag9UG0kwaSnNp4a54tpR27jHbSwcAw9WO8n7w2gfyYfD4I/lUPpbP5HMAR9UvpLN7zC4ORqpDHIxShzsYrU6VaQDGqEtkKYBx6pNAf4l1cFaNc/BcjRfr9oVySE6A76q5JDfAD9UqDiaoux1MVM87mKpedDAd8CAEOEitLXUADlC7Si+A3dVnov3sq76QGPffTGbJAmCOmkNyAZin5hEPwEI1v4MlajWpDmCp2tDBcvUXByvUGQ7HqDMdrFRny3wAq9QFDkerCx2sV5c52KCuEz2HjWqSTQA2A/kzOdj6B09lNjIAKgCdAIAAigB4ANQAZABOAFoAhwBgAFYANAI8ALwAxAAAABT+YAAUApsAIAMhAAsEOgAUBI0AEAWwABQGGAAVAaYAEQbAAA4AAAAAeAFdjgUOE0EUhmeoW0IUqc1UkZk0LsQqu8Wh3nm4W4wD4E7tLP9Gt9Eep4fAVvCR5+/LD6bOIzUwDucbcvn393hXdFKRmzc0uBLCfmyB39I4oMBPSI2IEn1E6v2RqZJYiMXZewvRF49u30O0HnivcX9BLQE2No89OzESbcr/Du8TndKI+phogFmQB3gSAAIflFpfNWLqvECkMTBDg1dWHm2L8lIKG7uBwc7KSyKN+G+Nnn/++HCoNqEQP6GRDAljg3YejBaLMKtKvFos8osq/c53/+YuZ/8X2n8XEKnbLn81CDqvqjLvF6qyKj2FZGmk1PmxsT2JkjTSCjVbI6NQ91xWOU3+SSzGZttmUXbXTbJPE7Nltcj+KeVR9eDik3uQ/a6Rh8gptD+5gl0xTp1Z+S2rR/YW6R+/xokBAAABAAIACAAC//8AD3gBjHoHeBPHFu45s0WSC15JlmWqLQtLdAOybEhPXqhphBvqvfSSZzqG0LvB2DTTYgyhpoFNAsumAgnYN/QW0et1ICHd6Y1ijd/MykZap3wvXzyjmS3zn39OnQUkGAogNJFUEEAGC8RAHIzXYhSr1dZejVFUCPBW1luL3sYGQIUOvVWSVn8XafBQH30AbADKQ300kQB7UpNCnSnUmfVuV1TMr1pMaCZW71Si7KoT82vrNi6X1SVYEa0ouNCPLqFJ8AFyIIN+T/dgzE0iUIokGJTUO69KpuBMMvmulUwJ9if980h/ILC56jecrksQA2l/AS6aDaI5OFmKat7bdan+r300lAkD0LoNugWfkJ7RNiFeTvHgv7fG/vdo5qh27UZl4kui486bLR98sO/99wOBPNFG3DKAyDiqC6qQppEoQRchTTUFVEFRzQH2NsFt90m8QUejsbgE6/BWmkLX4fd5vAECkwHEswxtfUiCghDaGAYwpgatwgYKG4TlUKoH9digHpejYQwHP0NtmJaogVAjkyoG1IZ8r3gbHWBia+bwxWhFrRPgrS2gmhU1Xr8rIaCCoibqM404fhfD7va77C725xP4n8/h1v/cApslQXqrW0G3H9DSgVJs2L2gO5q7L+9+4ssON+52W74RzR3oLVxHh+O6fBy8GDfTgfxvMd2YT4cTNw4GQBhT1Vq0yuuhOQwPSW9hYllqBE5hgxQuI0mxcHotihoT4K3CW82O9wQiilY3PEpR1KQAbz281Zreu8KESvd4PR5/ekam3+dISHC40z3uFNkRnyCyQbxscrj97LIvPsHXNkPoPXft+Y/2b31x2973c7Mnz1qAbbY/e/y91XvO7l6Zm1OIk/8zy/fo6S2vnom/es1ZcXLp69PHDJ86ZPLGEcWn7Pv3W788tLhwFkiQVfWtlCMdhFioBx5Ih3YwJSSrwMQTamR1s4Gbycq1JyqgRqVpVrEaNp/TEsMjt6I2DLD9Zj+0ZuHphorW5t5I87t1jfSnaZmCm//KTGvdxp6e4Wub4GCCulM8fqcupd+f7mEMYHpGsn4lOfIC50byojNra86C17bOnVeyqHfXTr16ru5J7t+K8rattJLPdO7Zq0unPtSURQ5niUU5JdvzOs3funWx6elhg3t0eXr48O6Vp3OKty3ulFO8dbH8zLAhPbo+M3TIc788JmY/BgIMq6oQf5EOQCPwgg8W/IUeNGCDBjWKn8gGiVwpUhpwpdCaWRrwTkhpxjulWQrvrKFJe+iWuqEuwVqXE9FA0ZLwHk+uJKuuWoy8sJpwojK5mnC6uFqYMIMphcnp9sqMusZS20w0ca0R4p2ZGRkhooa98Nqgxw5sKzzQZ+xIfPzxrdMD5YO6Hn7+PKV4cdU0usG1dW3KpEmPtx36ZPeBuDBLfWHS8k6vf7BzQe8Xuz9DZ87bVLXt9oTHOnz6xDgsTpw+b9Iy4fOBy//VutdD/6fPWEB4XnRBUPc5SsjjSNUeh4HlPibomIsvSivocvwEEBbQZuRFeSRYwQJqnTRV1DffZst0ykQwKfYEp8njJQum/jjXs3KvBZf2eMGzYGoFeeZT3IzPdZw2jqbTz3rQWfRmycDxXXfgcwAIHvbOzFrvxHhCTN4Mm9
2fTog3M8FmI5kv/DTfu24v6b1hsHf+D5NJh0/o8/T1LuMn4U+YlnwGs7BRt/FdaAkdCggNyCChh6RCHUgO7bvIdlfU9z1QlwWSRNXCektaIlsqNVNi7jnVKdlNguDFrvRMK2xlWRuFTVvRk4dm7Hl7pnCx75px2Ju+Mqbo3/Sn/phMv/w3R/40rBTTxXchGuoBe5kKuvuQMWxfurtzuKxuK3N2Vh/ZiIV0xB46Agv3CLE7aTqe2InFgNCQlmM6XAUzOPmbNPFeEOEvBc6yV3ct8XJuVn/xnSG0vHPO4q0rhh3jOFJJEokl74LAOGQ7p2GkY2ILk1iaiF+RpDWAsJzFsUlwmnFdP8SMiTFj0p2hFH4qk0crBw9Xy9tn339/dvtBrR95pHWrhx4CBFtVjqDokdAODFpkKGRPOt3o27WJDNw4U24JQGACs8IoZoWxbL32oRWj2M1R7Oaws+I2GKVoVjR4pkgpFOJOIYJfsfna2uxe3S5MVt2dZIpR5RVfXxfLv/u2XNg9v2DZPJK/OH+BQEbTvfQA+tH3Bz6K7ehZeij224sXyumlihvnbgJCCQC5LL0Hcg0uiUGR/pxsgMQNQkzThLB1E4FPspzCbZX8qT5yeQ9dTGwNxdP52w4DIPQDEH1Maic8BcaAa3i3MyLSBDRBcfKVFEWzhOcVHps0h1MJrefyY41fYDGmse5GEF2ir7Ij3hrXY9GERWt3o3D5eAVLa6aRqwtI69mbemSv3LDk6K3zuy7Si7QPIPSvqhBuM3SemogRywDF1qCrywZ1OTqI1f0apGkfA/bTNgGO19L4rwGA2WqsQdNj9cwNFM0TJsnuAf58XUVtEGCtlhS5oT4mhhKSosYZ8kgpJjcORUkupNeNuYtzCqumFOwOfnTqm+kjpuRUAR1Oq/YUzspdtn7VYqEtyc1GyB//5udX/jtAa+FRZx/4ovzdCYuW5MzOI0DADyB2Y7oaBXWgizEChN0ClxUtIseKzAGGhWJZDvIsRzPL0XpCqd/EwTvcukmjD11Wk5B77NieYBZZcjA4Fw8m4Ndr6A7sPlr4qbI9OdYEENYxG2jJUDSEQSEMyJZFhiFMPrcAVDQxzJ4pFjkiU5pWLzwpmeqxSc62NcB3ID4M1sSjN/MTduZvBEapzRFPWDT2+hKq2XSnmEynupJvgm+1GJl3+JtfrpT9at1pXT5p7qpN86d2aEOukAvb6YSH6e3rN2jwwoczZ6svrdzlbwIE5jP8DaRdEA8u5vPCKlxbAr7/GCkBVEvgiFQUrUGkHjjcsmi6Bxf8fgVSBWbcjholEJ5JuVQF8RMO7/vst1OnaSX2wn+dGbA56eWpMwtWSLs2iLduzKe/nrtBf8ZHg51wJRZLwXHZPR9/+9r7LxbuBmQWCGIqY1+GtkY7D28Fxy4pkQYO1QaO6OYeVEwNvvZf0qeyQrgkdb7zvpRYBCDAOMZLHd3KXdC8Zm8d7IUO9vawsnH98locnAsvsyUv9ovcUqGel+tWnFffWUukmagORUuJJCtkJKEsKyKTEHimpfOFes7ZNoPRVjFhcPaCqsCZ4NzsQeMqykq/W/PSnTWrcuatpt+MXrigfMEiMX10Ses2H0z+8PqNDybta9O6ZNT7ly5Vbpm2rujWsgKx3sKJY/Pzy5cAEBhaVSXc0uVsDL0hXO7USGlnAzuXUrBzO+FpBAj6L7tBRQ1OXY2u5RF4BqRLxLXB6lBAcvuZl0hlLt5fk00LD923ZeCsvcPHnsi7dJuq9M3G3s9/p9/329B449RpqwvInA7PzbiRt/KbGfRD+nUG7UWnSuvFL+9kP9f13Zt7175YBlVVkMsi4GjxcfCA7XdAE4tnfwgTQInwhIk8kLE7m7Ko3IPd6WX3fCJMQBmUGAAlIsvW7wSEzvCRME3sCjIkROgYu8r8up5LoeRAPzrQTLIrTzG3NT94AKevxGkHOL9FWCBcET4GAUyQCsxgWOKgkxhp3ZpYK6rzlEK4UrlPeIz/Ca22BEs3AyDkwgHhmvhEGIsenDkWKaBKHIuOxC/UD44UelaWkEUo7KO5K+mCUiDwRNVvwiS214nggmf/InYls0Ey3+v6UthY6itchUUF/jZ+QSh+seCVmXkvfmWEPL+Jpbzh8ngYaftUznNjsobP2E0+e/fDsy+P7lJWXS2vm7zouYUDRmdNHvXvlw8f37WzZNSzRfSj6vIZCIyg98sXpDXgh8fg/4LaNpSbmBlis14BBbS4tmYOMS5Nk8xx/JdZ0dqTsL0F1LaKVj88wUrWZgG1WZrmDs/FKdojJFJvmd/y6sqbmWHjEjkFmeclNnCliMQk20Q+cuoJPrHbbCxoizaU9dwl086ZkI/FXHpnrz9jcddlK+1xU/dnPTunW7p91fglsp3uptpReuTt6Jjl6D3d950HUh86mXWHFr0VE1OOM364jUN33P25zrO9HxjbGFu1e+SFtfj7z/SrbT3+9dXJ11BY3fzh4IUvr7+NC7DoMM37/RZdVdbCPcHb9gZuxfpox/d+uE770uXLioYPsOAfDb/nLDYAkBpKKpggCjrWzp5rHxfIbCBzdbCIRPdfkVqrRemToZIffehmvXAyuDH/EGmxjbQ8GHwKf7iFM+h8dujSjdQjxSBAMYCYp2fuCZAEPQzxsnb2BHqEdKZpceElzXE8ieKRSAkrIRpdjc/qCmccshvZkCUjrlRXKE66ivHadz9MHDopn35FD+ODuS/RT2kppsxas6SA3pTUA6XDNzR37Z5z4DopDv66eBqa1s0aNWU0AMJkFhEuSQcYhx2MftKY67ITkrgAd4A2g3OsGzliSRNXLtGdDFZ/OtcacLo9TF0Iq6ZteuJ7qT698T2l9OgKjNr5FSY6y+puLXz/9CFt8/YGeOrLu5iNGUuOY/prNPj5jvX0x7tLv6NfrXgbiM7yIcZyNDig/T9wzJmLCaNirMbW4lG0OVnkFk2ClXltVtoTbzG+tA8bb8JN9PKBs8fK//j6gqRuo8eO9jtFj71OJNvdxRhf1eMW2gkA6kg66kiehrBG/Sk/ixZlvq3RBqcoKoZsTdHMBhdpdTmq/4TrwXzyv8ohwqpgSzKZbAlWbpDUjbRF9fppbH0LPPIPuq5ZiBhW74j1ZeOK7ur1TgQ3lAq5wfvIEJITnMnXqgMI05h2XGPakQSD/7+04+/qIa1RKLo2Sns7rlFSI9Lv7YcbPcM6rWEEmlRZ5A7H61eA7ZLTTVwpRKjWHB46xGtd6R+qRivWEPRhwk1MSCrNoOVlh/H6/lEv++lOouwfkbUV04/Pxi444usL6KI/0arJv9FPWrfHTutD3Elmfe96GPfOUOYZFMqwqyrwqoGTusmC2VqaBftFbKheXXFKfaz1SeayYEppKSkvY9s3QFKDy0g215/3WDNZr0Yb/sORsf4uH04uLZVU/pSfVUAn2M84aGXMZ8PBm+Nj4KRIA+CpvzWUfvlCxacQXXb39OWfS/PnTV6Fknr39umK8iMzlxQuhGp+JJ2ficbMM1x411Y041kyEJ6FPmLtCn1hBEyDRbAOSmAPmPtp7YGRJUuEX
7dnyB3lnvJweZKcKxfKr8vvypZ+DKtJJw99iG5SX2PkLfwq+BEZ8QV5bTeNZxS2JoHgzMqz1VbQgCGVoMk/WQFE6hfXdB+OIFrl0rINzJ6qJZa76967j5FXw9YYlMAQo8Mn1Xw5BFE/4A91URCqvizEx+SyoxvtrMcteA2v3S610ZRV1G0vZXvwH/FVFk4yydC7w8Si4KbgUY4trK0WeFLDKG5Axk0JA6mtPQbz1IgEOiq944qFnGYMqai7rIx8sl8cfHcjA7JWfB4ITKqqkCzM6q2QBO2N9baRiFglslASaxVK8aTantNDGYTDq5+JmHSTtmVKluX0lvoG/X0VWYnRb+zE6OX7A3vfPS2c3b3nhECKL9CybcXY/lTWGXxsezHdf56ggA767e8j79IbGBeE6qhQqlfLdnhKi4rXS5YonsBBmILahZMWLeCfXbMQjm0cPaeIeSFW37uro6zXhVmlpO4PGEf/+IMWY591r75aQNeT+4IsLv169NznG1bkz1svAIHRVVGSzPhzQApDZXY3DuVtat1qVFYGxGrYP45KMFv5fVZDVGXZXrKRU5NkSpX/jtdkRivmTkUxh57s3O0etyrjtvTkvndOC6dxIuf2LP2454mpv9ru8VtCy84j+8/J+b1Dr1fzuw1APKpbhxMGaVKifrwi8S8k/2B0hgpbU0JplmJIs6J1y+Aak2AMR9WkyyZ0uLGGd7KflpThp7+jZVUO9jwVHIPeguItRfQKeSr4lqRev5B3rG2wMIZ8s3rGwuUIgNCNxa1sfl7EUIO3CVvL4O6NH45UmR+ZsFarE0boqaeHb4+hHKzHP6ew1ljj8hKQbcSfvqFw7a9xu+ke0vOPG2i/Vvjt3LJta5dtWoMjTw6hFV8WUuaMPnql6OVCkt/p46I3bkw8MXX+mplj+0wfPv3VsbvOTzgye/7aGRde4FK1ARDX6HluK6M4RvplxRDyA9XE8gi6hrbYT1uKwyXbne8l20ZAWMKYKmHvtMEDmmSPZzIb3aDhBMoQa7Q6BnORwWRKAS9z36FzEKtYgrTqmu8HepPs27HllTcltTLlFL2jECSfCtcrPRt37tgoXAVAnr+LQf28o50GJl7vGBM8g9MzujZAQfdpqXqy7iPs69qZ4M2S4Oenq8Rdd7qF/OiDAPJ3uox9DG7B6EANphnOB2oUOo4N4nQfL0RxbyqHuli9YwQ4M9HHGjvH4TVxMPhZg6aY/DLWbZL0aRndtJOeczrp0Z10cykeL31TuFVpVg8IN+90E1PHjr17leFDaA8gntLj70gjBWE8tZ2w8UgcUOTx1ZILhfA6vAsiC7nVU/nyWrlY3i2zKQFkjt0iQwi7HnD1/31kPvb7lKbjxZt0HS36DC9R3w1hHmkVbBVMIe2CR0g5OcM5jWNI9zKkZmhjRBrGY0AaBhdajwdCHxmGM67QqFIadY2cJ1crxwZvkCRhBX9/TwBxmh77Hoe/Tz4ifYoI3NHwcwcpPGmRTGwyFPv9/AzCge2FR+9eExpV/iD8sWHDcnHexqV8vZX0CImW54AJUoAhVk2182YhUttZ+ORZM4nev58uxKnSV7enFJne5+9pwr41tKv51kDSIm2JPci1o4lKBqqSeptnMRZ6BHP0VVP1uzFNJZH4VTQm7HZ+hsKSCQtOo7llZfKcW52L5Dy+7iPkshCv25DXYENhVQ9oaOLGwheRuFOornBL9r2BzWdjs+3iXtqIXAw2BQSxKksoAgAB6ke8pnZCJfHznKLKUcLqNWuAa694Ca9IFARwg4q8yMV+9z5foRI6WXo7jiQRwpM9vvyVTZR+wh7zgB43K4RvxKehETSBqZqzaTO9WFbU5Opo42QgnIm19d9QYROnnnlF845HePZ4ZK1ti3ZWx50kw7GeOzKH93h5vsx9uu/edwv94MdpjXc69NM9dzI/2muiRM19a/NJxK/fnjh+SO6eCQcn7T0nemh0r/XuFfSNicndc99ZXLy3x6AJQzs9u6b33ldpnRd7K0v7di4/3GswEN33JssAdaAuDNVs9epzbDZFFQLAvFI4s0w0er1a5xiSWdCTzRjeqTG1S3SnMX1gJz8mnmNnJNusXi6dycrdtZh8s/TkOEvJ7nG46Mbulfnvdevx9oLVxHqLnl0xU4bgR4vpBRqUPjxVQluUnAKE/7C9qmB71RC6aEqjJLZ0xNFbYu3cBiIzGiYfP2SLZ60RHqfWV4dBBKu/mnG3R98AxjZ5aMhq805p0sEx/6N3J15e/e5P5p3mgqylL63LmdK337ah6EVI2vh73pUdWQuPl7r3HuMaNYCh/FEGiIN6jOHE+g04RYkhhuU0w6moIZE3opeEGJ1hveMM2//2s589neW2TsavmysRCf0DgkwrF2JAxf59Y3eXWMYe+uC73UW56rP/eiOviHhuY9o8kn4HJuZh+i3T+4GN+NPaMxx7P4b9F8awg3GcpZl1jjl7LPcKw0usbQD1zMDvq5f29v56H9cj/WodhigRH7tCd5qNOZiUAv57J9quhITQSSCmyCaX3+MhT12jFdP/N/fsN0G3+NaiwXm+8Xn08rgiG2lkzotH188pW4IF9BsafGrzwW6P9T4tHHtlVZ2lLwHCAwDkmOxg0gzR4hK4FUZI0ShSwRMjQ3Ft+TjfaEiPYyOdpWoPML3i5zzsJF7/1OA0hRSIfwD7cvv2PSWPPByV5u87+Msvhe0FY3fssxZasgZnF1T2AAIDaU/hZ8Z4XWgMOVpKqofzk8KTQzDAC9tfYmT9a+ODGjcV0hsup/b/uHsP8CiO5H24umdmV1mbFwSKC1qSESjawiByjiYbBJIJJgsRDrCQwRiTBAibIJJE8JGxEWPSioyJ4mxEOM5gnI/D2RecpW193T0rNL3Ahef7PekvPTubd7t7qqqr3nqrNtzJQjcRHlHt/DlmniIFYYp7RJjSfAG8O03jojC5SqsVq6yvz17MCdzz242Zn7bKmrV/cVHOmVPflK1bfOC5gXsXU/nyoqbLZ1d+euOfowfnrF6/LHM+SvzX0etb0Peb+D6+HED6xABgpnocZLHy82JKEFB4wevjd8LonbDacJ/tWUF6M5OaFMMiXa67PKRHnfIuoMGSB43PeX5JvMcjHS0i+d4U/KeZU7N6VzE2Bwa2DY9TznO+WhvVEBpGP5m55kjPrHtEHnANScigCDCMjr420OO5rOHxcjqKfqpNm+effRZw9WnSAw2l3xcCDmbDnHV4mMK4ffAE00tPsA6wo4aAwe/2BNWk6B1hU2ycO0VzgSUmgdogepD7rZNjktu0s6alpNKxpMrpld3IZcuagA795eMoulkGHxYgtg5yiAHouGbqgiymIqLWPxmDCeAYiz0d/FGYcgii/qDv6UchmIuGoFoQJk1zCstmeDyjUL/PyDB0+w76aQ5ZaICqkbPQaPKsdxkg2AyABhrAD82Keiyaxc6EAdgcCwAMs/nuMUuVuWUTNewJBk5Qt5p52+gdW82devROPe6lB/AEuMKvSgMEcL0O836czDik+iRVo2ewG644doXSlVnlXzyX+tYf0GiDZ0L+
i0uCyx4c6eCR02cvf7t3FlnsbYrLZ0zPG+dNxBe+3VT1tZxeo0t0VmborwZbrOKsxIkIm/ijEQZzz5k1CNZrldNfrVArw9zLOrWS05ds1qsVHRRgGEa9jGQ6qnCoBx3UkPqRPg6rVR/D+2+AqlVwfuuKjDC6dMAYctQUQQ1Hji/hsPxPCj9C5jmfvXGP/FC2a/mKnXuWL92N3VvIMvI+CS2pXI4SqwIP3f3okvrRXeYBkSw5io8tAqaoVm1/tjL8RtBBXRQqrJzFPxxUQkRf6DE7tegLMVFnkiA6Q1Gfn72Q69kTmHvl3S88m5fsHtB/32vF2PwLuZHv/UW5O3s5uUt+l4/eWuutXHOT+xkkS/rBN4+Jop/xH3YOLuQWYfX9PY7/6G6kMXjxEXfj6wtncgKoQ1d2/itP8Ws7Bg/ZvqgEx1ejxq9M/j0ey7NRy6qAsltvYEvhnzXZxUV0BqHQWZXDWKZRB/gLg/XbEbj/jHURV7CPh8CX07e8TlzUpOWRdp5D0rBdqfWlNcZNXpDT818PA8R9tONyb47VBGpYjXC6BeKjKtWvIcCGUhxeUGtJQCPrm0pjK+hRbSCSXhvUcBD8Ga88l69xTyScSx7s6PPZgWP3y155Ycy0Cci+v/+XngWXcz1KwbTx81B0j/7PDpjR97Vjp9b0nDKkS4eObQbNGfz6geE7sjInD2RxXfW3eJDSFuwwUg1zOEVEo46ehFDnUU6NRqBjoZ8ksFAC9FNldBoLs2Nm5tnw027nYQvzfMxocXl5aruYp7t1mvvyhQtKW/J7oTe7XbuQdbZ1y/CWQmQABEvout+jJsJErRXFMESMTBiWuN3oCdka6Qo/xgdoyAbD0SAmkFRApUaTrr91GHku3+rsKZ0478oFfMbb6ecSyVp5EQBBLIBUJqc/HgMSRK7OIxiQImBAlF0ZcpLMXUFmn6yUMiovMiuIoCmAcpPeDIEsVQkN8/98Ub5FyX9y6AXBEt9ktKugYN84OAbEhmK1JsndKzzkwjryWzWsIxeP/blqbbXUqvKilFz1Jzm96rbUBBA0BpDK6diCob8wKB3qU+ffoz5BMoek+NUj6I6VbeSSxNAd9MvfPyAlaPLt33//C5pMSm7jA6jA+5X3I7SWTMQu7AQEDtJDKqWjCadeEZjM/iul8wCF08KcIwhjuq8nUwDTU20M2OV2pzgZhYCO4/uqi6TXmHuuTokjxsc1Ji+Xo3CpaWU0+acUuk7uOWaK3BwQDAGQ3qEjETGgOv8HGFA6nlO1Aw/0HpKSi4qWSHU3vMoxFPIGLjG0hjrQUrXWjeAzD02guqgjhkUbWRZLqo2iDPzDOQqckuxKSUxJSWURk5myRCiL3OLEsw++c+sWPvBO/PVdu6T3yRuJ909c+tfr/6w4+lnS9A7kb+VfDH3+/vvku/ZsBAcoJ6zjE5mqiPlQHdeuJf80nGKvttLxTvONV9HGyyCPOpQxH8y9WTMdr5mO11I7XsVi5uN1plKmchods4nGFQ6aEU+yx7Et3Wi9ajx8+Hr8QRXdunX4QGU7FHTvwYDnvrqKIjpMT/zMc+OH1/9VfuLzRPb9r6I35B+kOHBCe9XMcwNQ68g4OOZUGs4DfVuC3paF+9uyYCYizAI3x8wiG7l9djipsKTIPxxf2nX+nu5Neg/Ydqyg5/LStpE9R0qBJXdS1jSYOAJvfb/ttiA8YyRgKCDr0Vi5F48fEnXxA1QwaE1QaaHkBTNtYdCc1WVlrjqLG/bufljxgvdXfqv09EUNiNYwBFMmajzEwnMqxLnYnGu90Dr+wLGxQg99BHHow8ZsNzvWYUe1nj8AYtBqLzAVJwuvzRBQkO6jKQpiuLjK887l8oOedWcMGgiy6dU5Q1++EvHV13Go/j3XLRQZ+/knzlvraqAQBMMAZBZdxcJctb7/uB+B9qNtPK6LTlBHRtM8d2E0ylVPR6NM/WwE+iGr9gmo0NS9NJrRAR4/Q+S0GWONsYwml5bipluVJOzFlAqKzga0wR+hyl97NUrEATu2Bv50+dTHp+fljF8QiDLwlHsbhxUXB76aFfBRMZIvfX/r4MS5G/NJVTEApufmvjJM/gfUgyaQoeKmzbR9qdRdAeL+ZapgMS4WUECKRbn99i+30Z0WT7XEncZ9mDSnkXG/nEZkczgSOamZc6HkPluuX9uyaEHBuKmrF6wueff8lrULi6aMLVxYlTX9/Ofnc3MvTM09P33qwgVLFq/YXP7+m0VL1s2es37pxjevnt+yagnOy7v1Ut7NvJduzpl9i2lVNIBMkyXgqMkBOOiwHUISs76/vxhulZqqEOKgEz4Ubo224sxSKxM2elQtWEcPZvpoZEc1DNfKZQXH5Bnv317D/ef/KAmPRZM+JCPQ02Q+mk/mnyWLGPKMniEj7klheLu3Rf6OueQUaj93Rz6uYOdgNbVgvbgFM0IdZsOERJWqIKkp1TXqEDDXcHVZWRk1+c6qr6TL+GfA8Dwxy3OolCZDR5ivujp1phNiVT4ptYgoLw9iH+UI4NU8DpOaoaO5OzJ8MFkYFUgBcWnh4ky6FiY1rfbByLQW/CuYkPAqIiFC0AjezJGJT0l7yPFujqlM+JJ+cq0X6ZCjcEOKHWu3nVw+5DllnbqSqr9OvdK5oOzQ5iU7V14/cibzSPsuKPjjL5Hs2V2wctvTi1H0ntx072fP9+jbI/U1VL9Z7wEF6MDJgS2XjN596elnct/DC4pmZg0d36ZFzqacsiH04Z2XP38vf9P0Fzr1bde3a/Yr++rUs47p1Llv++fMtjGdhkxm52Gs/Hf8g3IBKMgHkYyhqauWYNlOo0nTAh7PaRhFw5obY33sxbe1a2UYJSxS69fUZwRBgmG0kutvynmuac/AWtWd3oqThZnMsWOqT+Oa05PVvEZaU+mdVO7DpzbXSLeHwqVoCWeqQc1TeeI+4RAEmYLoA2FBEi9ewkLg8/CeWo9n3UpTaXa8tuyrOdVgWX/6uD8sOvs+knZDm4Xy9i2U/NXAxSiPNJMeQxPpPsaCPPKtkuKTpzdt3f/GyGEjJk0aMTzTi7YiK2qLLFtLyHfbtpJvt0w/jnqg+aj78UPk8MUL5PARPHDDtptHppTe/OPaUQOX5eXOXjZgzML95MOdO1HD/XtR3K4d5N7ecvT8pUtkZ/kFsvv6NTSEawx+Rwrna9kQJqlh8W42szDGjRfp2aocb9fqOlguB8t2nujgV2zXt1OVrt3mzcHscU7JkPSJjhj9AtUkOlJZooOtjltbK5rm0LIcTJbxhBBDz/mzFuzaP2lupz7b9i99bWME+WPTIfWn9h+Kz8bFD5r7Ys7s5MWpSSEvLihcRM5n98trVG8lykgaQfnIY6FIGi29A/FQ+jsBI5SijtUEEMxDs6RTUgwoEMGzbaiCGjaRHcfcHU4YPlXmzZMy0CwUsA1keJ5K3n26WmEQBcnQGvaoqW24yqcyN4IdrfzoEhkgfhCZVagorFdbLBjDfXjKGVbjNMZaHJXJOFMclcmUmDhfHeHpFJR5CFJMKfTR6FqhbBSdwt9rKk2oKE1IYAWXrbEuVhe
FLM3GaLa1Mqgws8vJxcwbc9pd8cnueLc7SSuecT3vL27TqUBu3YZsxcXkWy6Q6MwKZNuwZ/5LyPx6mGSaXrq565Deo5fhO34yd4nJ5B4Ut38fimUy+RN5W+r3an5eu8SNrQfFmxp4zFnyfNw+tVtrAASzlVipPbfnZuDFJpLI6Zbae1NxuRJbCBgWSGfwXHpugsEBCeLys3LVkAQ1EAt8G2F1uOhxnXXWwEk2x4K1E8atXj1u/Lrq1O7dU9N69JDPjNu8afyEdescXZ5J79FnUnfAkA0g/ST/C4IhHDqzajQxog40Pa7OrTRU4HsoYQa2eQYr9RScKdbA8YK0pWgSWbOLzEOv7ELtqk5KHaRBReQFVFKEiitD17OVao834X3KcXDAADWAo8lQGyoJBC0b272wUEgV5tC0Xg2ofTyMV/LYHMyR5YuNauuoWImqLRzH4n3ePajZ5LbP9uhSvAsFbJw4oBQV4k2TUMTYTi1b93xm2pp5U8ZN7PM6IGiDC/FGpQziYaka424kjk8opWLjg7phWinVkRyYB4UgZaoZgHKPhEM0JICklVSxARtxLXk6rK6PyRxfq1E2XlOlRmqfV5eaID0VXdtSxaoqnxQ8rKpyu1DggO5dMzo/06P4zblLN3duv3bvkoU7S/p06Nxt8xB5TOsWT6UnNX4hb864tGF1GxdOyH954lPPPpuUy9m6efIHuH5NThrTnDRGmRrAcohNBWcyB1GiOWqJl1ayyP3ZT8mPaxVC7rL3b6TI3vdyOligrxoq8GN0MK4Ql3JgxOJPg5J15CdjqHZGzQ6O1mnJQo5Fov7oxRmX2pTtCszcu7ofBXS9i9/cvF6Kqbw4fXE30lS5Cwg6AEhtOeetqYqDQ8RM2iOUcwQBGunPTI0Oc1lizXjRgL+RX1DQ31AoDiC3/1z9e18209V4IpojdYNAcKiSj22IEw4G0HF/UO8eV9GaEsvVWoklvsNqLBMyqGDADNIL7QWWy26nKuEmcZ1MfqDtIavBZaDGE3GI4qDR9xWlSEMLYjURcGvuVhqKDNmwtdDYZ3DbF2KS672RnTsxOaFZk8BFjJ+Mt6MfeEVkWxUx1OiJhZE2sTAS+xdGst3GSAsj0Q/FH6BRFrwdD31m/kwATL9Dldw8TxRBv0XSsF2JuU+iiVOD6kmaF6OaJCEDL/mZucdWlxtfOrFx04nj5E+n3swe0H9kdv9+WVgeVfLu2Z3dt5w7t8Mwetr0Mb1HTZuSDXxfXS/Nlg5DPBwMBTDCQTQB2OMDAZTXlbfADReqP8Tr6bWK6kAAMsJlfBsATOLy8JqhvgDKFf4eFb6FAP7e23g9MsJFKYq/R+CA8ffkACjfKcf55xfx91yWGCRghEvQEm+qeU8sfU8sfw9g6EjmSbNpfF4H4mCwGqixIgNZ1QDLONa+nsXnYIrlSNZ/qs8pjaW7tz77FiYZjdqqJhk054ZV7/C4PoWJL+6JGmcdC8YzJo/O9+DPjp6/vXVye1+1Dt49Yd4fzo5qOHl67rBtf7ryzlsHcnu/gVpTr/epZjxj+E8A42DOwbbALJGB92TKuGo2gIbFPJH6rwaDr1ZAyNYL+5PFAL56WilWcrHtycovKFYyDq5aEe7903ufS1Olo95eNtzbe8yBz/5+AF2ORtlki1K6njQu8n6HZuOPAMFQeF/6SB4FwfA0r58PDJF8hQJBgdzrlqVAdoWCZJ+kKxWqUQ7iL9KwGitCaQg5ETIiNBR1J8dmoW6o2yxyDHWfRQ6Tw/ReX9QnjxzkB1Kah/qRAwASZRa/SSt1vgUnxEBjGKvKTZpyjWTeLjvGV4gFXOJKRpg4vuliVzxmq8cpJJECQbMB+yA13p+IzGgvafG8LoVnTIwOq2JzsiQFNirJbuSopSTvezV75apTjDd7e82LK7YsxVXNXsDJY3dSarJkf9r74bA5D/nJz216cAaN688YtPk7qo+Tu6N+XCEtyaEk2tAjr1YVtmU0Wgw7AeRMKjeh4GCSz30DrXmHyLUUfVQEwb4CX5N2y0TPlcAMEwmYsYlatMr8FqvZx51FWci5+t4s8usX5PuyMmRfuXUrrVUiH44/9/K5B+QSvdnB+3HR7LwixLKyNFM4wWCBJpRvEtu0mWhNo4TSSf9tJsjKkd8wxapl8PT1ojHacy7+HIONGokVEzUbv90Whe01VAdt62ehtuYgmFFHz7WyQxfm9zgx6OqRfofjm7ZcnDIxt/vJwQXjhtyVB1d8886W/KudkkauWtJzi9qs/qaYZiOeS85avazf0GsDRkwkH4IEvau/NcyVe9P5pUBruKhiHjkwB6B5BTs+8zieWSS9EynSDvzRMhzJXZwQxcmzjpR6E3IthHoWTpFvE8LZIBHai9P5VWk6fXH6tXS6F8YKmt8Q1YYV2iubVrB8ZoJgB1OpLioxboMujIuvjeOcnMVj11g8aRSTrg3qHJzQwwCK70nlknafr9h14ouPPpkybvzyY/88Pr00MePt8Te+9DYyvr12zZyEtiVVgV1LEv86c/kEqe/0tWYcsch2aNCIt4qK3x44MW9KP2vh4f79+wwm1V9NLz3dM3rJnHXdU7/DU/r3ypSS9xVEL1wNgOFlVlFuaAaR0JT6x8ZmT2k4fWmjCqh1PKP8ExvhdY2+6kczv6XG6RBHUZCQhULu+opcZzzD75gsUeROcnOszhf+S8m/zfxg0eJ7c6Zee+XNOS1W3O12ZuHRZ344cLLbOBxbMPz17bvm529Q7ORX8mJmiXfVK58uWv3Vgmnvrlgz6tVhLbekFrwyuupfT7fudnrX8vOfH2N2rQvsl5+Sy+itUHBCb9WoMeWNPPIwMsDXr80F6/EU4nN7Dhpq/Z+DppoHHdoNX5iFHvpe5oe35KeqIqS/ebdqzph2xEOOoXTulbVpU0V4C4yMDA2xeYmyAI5xNlk85WDJPAIolZkRZUeXyAbwYyS4dG1iXDLfeDm6K+vRXbVuvXDu4zPGZg1PgJtaMz8x3AJbNaNr8Nnc1JRheZ8VThnRbe7Yd+d+umrcoO5zR7/nyUaD23RdthuPHUz2p7Uv2EUJBN6CJmve20jOlJClrrVX16K0czn4SMzdw0dyvH3rfugBDGspl8D9GK5fiD+b8v+eQWB+hEHg5gwCT+65xxAIjFu95Qv9GQSRAAqrIrWCEybq0iiPlInYeBkwy6iYbPwW8538qJSlEu9dpXD43Vj7sJOTpUwcpA9nPa9qO0PQC0scJ5l9Aa+CFy1ixUH0iD86W/UC/ogy/laurAJWzCbDShRHPkZx3pXnAMEmxgGS0/04QHWewAEqK9MyshsB5AyekR0nit5/yXMqxbyrl4HW4hkoHnPacI2FFAn0tlrNDkhX1YsMPh+fn60kjdp0emJZ2TC04hPyLPryK/QeSZLTSSoq9/7Le5ONLw5Arsd37WFiPzIxB4xCuO+G+FlAQn2nREenr4LX+qHxtiMcrOK4e0O7wkswjSlpdGDjkZH8xgrU6LpLPQbkD/BeK8avN8lvgrf7xoSDDADB0F3XmSbqkd4gctC/GxM1SRW+Skbeni3Nzo
ga2gAmlZSUrVpVJo1pndfa68BvpuWl4c8BwXbSQ/4Hl8/nVYPN/vg6kUfdNosfY7BU1vvyamgYr8O3hPlS1ZzpyImOKSm+IjX5H/s2t04Na9h6iTeJFgS+R5nz3t1llo1hFV3kCZXraNHaenkcW5vXSQ/p73R3j4BsNZRp/39kX/HFs/h300J1tDBOTxwXuSU+9pjDqRsup5BxUlZa6Iyr7xzDuzbRUbvaL83JP9CPSvzGtyuuVv34x2OW4tBz+JeC+a9V3aKyj2Fc9TfGQN6pwgWvq6hBQ37iTKURFYLQ6Vbx39b6lYaJPgeEcX8sQbUJ7oXjSS0uQvTuNIs22IaK3eZkC7PlD8uTFY1kxDsaGQOrStVp28lyVEC2z90rdWYVy6x6uXJ57tjJk946h9+1r0Ph+1DKfmQustEi5mJvVb0weWX4/Wvk0s1v2O6UXf2tEei5i4FmkAzrVENKqi97G1/Bji2E3UkgRgikW73Pxs6lMYj7XC35VWnLBDVMbwx1THnVpr0ygl/xIEKfDCp96uGG5nDyY41b5eT+6qNMuIY+Byt7zocrl15p3e781GtfexONf1x0Ynb3pT8tfi+jzaVF98ivnq0FS7duW7Z4u/zUqHUOHLYUu7eSpTNHj51Ovpmx98KklxdOHT0qF7UggUc/+Mv7R+7cvv3msoj8dUzetwLgBQY7z3ZLPNst0kVFIRH0jhGkU2vI0XbzVlS6vdUAZ6Oko/Lbe07ZVwZ/VJnlY6ArFi6b0TBMhZhYvqNW/Lv+UIoWsSsJfkE7CFKmiElhhTUMiE1hVYxG6rKlJtH7DCZ305AsliW9PeQLclb68cePdhS0TnCUfImao9Gbyde79nwcXnXtpg0NRZ1mGhFG9dMjCkOHkMXk4IAL5PSREqR8GHf3r4Cq/0p64BN0raIgV7VFx9Ah6nIrUXrrJbr9IsGFdxYUM+BB+imynGN4BcvERAhpjFozkZrCiekP195oT8JZV3dvbJ0YFtWhXZd9+/CBba0GOOKf3SdflfZVkl1HLatDxw2X5cLZu07YVwe9+xIAZn0ClWJDGjihIfSnaSG3z5OLq/g3xbpqeKjMfWnOWg7VnwEmHHFPrtxlqcwkk+JwGvX1u2b5Vx4sk5/XIhYr/31TVuYu8ls2OnXtJC/iPX1Vi5F3ozbXRt9A7fZvMr66kLzTev/PMsLIUVPIG4FQDUu1TGZZbxedk1Wzg1ZmB0XNF9v3GGSrz06EVIhRJ5tTrD9r1TcVo8OfvKrpLHNFry3p0nbdtW7UF/2Y/MOza0XBrj0Fy3ZzB3RZwOj55KOkZXsc1AlFSZWUx/qhx3T47l3Q6igNkQYMEdBTDdHtPhY6VItQcVrfHxpGoRE+ox/AToxYEmtnI7ZRQ2vAj9RXTs/ecvAc+vFmN12N5Z+Dl66+cT3E+/IlUuWQxVJLzvlTwuVVUBeyVCOvN4InUBEFP+yRiNcewNfdzqBz1cDvaBxrsfUTA7YFGqC9DU5RwldvLZVryYAdO0bKqw6tlquO61mBr2JX10mAqg+RHmiMnA6h0EgE3gUfQ7BtSNA3NGbv+lbJTL26Usr95L2qplGrWX29/FfJYAAIgGSt5o86RjQtYIw2UkdSkVnAWbdUYbVrND+A6LVs4ska/gzvBEZDmhRrkmTYsG7thp+nyt8H7d0bgkxcHuQv8M9KNQRATG2G81A4ikb0s0FGfMUq6PIy/yvJLrmklCR0Zt1WkltZrAzcG0S+R5YgQPCKfBV/oPwFQiBeDeRWnoN24RLKVANrs5jcEaZKwNc95mHuBH+wg/y4s6hnt859lL/MWb1mduc+vbuwGgP5ezROOUdHV0fFgcxZ9KMI6GgBK3wsgME1lRMwRz6E3Ya+EAg2aKJKdp67krQeyJJvGdUMI8rkD/IA2FLD8OL0KoWPjuscds8dNjwv71geOdyhZYuOHVomtlfmD575h/0vvTQooWP7Fzp1ZquZSPqgN+BpMEFzlYJJvioVwYlTlYcw+5FwU7QpwSRlslQCjfn5Nu3rQIZeTs/t3SI5tPPzQ19clPfUsEFdI+Y0Gzdo6MantWzRHamN8iU4oQ2fCj9Dh8IDogMwnwzvH8wkPVxA+G2196h5dYpsNg7GRGGOO7TJG9742eym9Runz52T6Xo6Kym66TPKvUmLbG1CM1oaJy63pVs6PgUYRsgVUjOlmrNoWjHo4EkpK7br8CZZD6MhNkwjfdJYk8+SkiQXzrxG/rVn8oW765Rqch0lkOsckyET0Z+rD/N8bTKbb9tgkExSjNRCaispmVqnk7aBLQLbBvYNzAqUqeAGoky2y0kmXmbl1CVtKT+mxvd5eXT3Li9kdev5wuDkzi1auBom/rNzdlaXzpkjOrno3QaJyYC8I+Q7ZI1hBoTxWnYq0IAyueTQL2QamGDMMMqZdEoq0uisoeDTOncqk5w0Xzta7wzUo/OwHsa1G3v3QvKdDUpUb/eEFwe27htM5dz7NNlOrNV/gABfn1GjTsCVGgH3Pq1J+E+agLM8ynZcIK+Q4qAznLkDPd9ryx5bhQuUK9pjC2Hs2LZMXrLklmi2wQoBEKsGBAaJUVEUE8pAnz/EYgZO7EtORWETMqVj2QZr13mrl8wYexkQtJAdqIsBhM/R+3Iq8EaO+r6qBsOG8ZnSUZQtO7ouWLVqwehLgKABuY9awWEIgCjf5/yn5qwrxg+TPKPI/W7z3vjD6DHldJ7j5Jb4OJ1TPOwJYLmlPagDzy09KzvwIgPQx/eGsMf3ogxgUtSA3MSj4We+xi18NWSM6qhQa2B59Ls1qSqVmWXQjcMpDugjeizLJje7Lt3g+eOkm2359UQqtQiWYSeOk64yNJ1mnMN9FvFgUG2eUujtvCxn+LBpU0Zk5kjy4KmTMxsOnpIzBBBMgg04RjoMBparUqjpMyo1XYQZNsAaZUYhvILcQe4VOJ5MRwut6DWePVmPw7T3cbmVjMCtH1tTZGe87wfITe6sRJgQ6TDJs5I8tBIVAqJ6PEWaoMSBBIHsnfyr0tzI+eY4fGncFNYCmq1yKl6Fjys7JJqxA8CrwCpm3/iigY7P2ZhGS7E8i6LDUR8BKRrX5SBF4wQVdGxAAZuoASaYejfm5LDGvvq2I+H2aHuCXcrUUwnrspQNT+frmz+ywMnCgjaGWvpTPflFYGOxgNIZK9nJQamW8ynt3SlvLzY8pH0a0HCyR0b90e2ONdzPTvlL8o/WkD+P5i8BhbEmDam+/vEuiKfrclAH5osOmB97Uux7aQpx+lA1zls+FG6LtuFMNrEGCQzyrJPgk2ObgA1GV1AIlVc28+ax9RMoBkppRKz7vMyDoXCkp981ZhiMGu/k9T3uwIiHXVrtHI9DPjwuhV4YHscubpeSlBLbMMmNUlzK4E/o3zlylrxw5g79O4P6ocLTVdmoVfZdbPsTuUV6zpqFPx0n7V+/Zj1rpcwu9CaWvVVYrqpYs2bN+iNVD7Yw/d1FPVeJrlw0NILtqkuruncxzFqgn+oWsMb7iqJ3ovw5z2JNXpRJJECryqMBkxpr4x5EbIK+dD2qpre7QyTmI
l+1i9NX7ULp0i6NOuVM4theTSdehdASGFcy6tZ57suFtgeXrnjQnPLvbIVl5ZUvnCkoWLyQRli6opijJ7H3qlJ65ggykN/JGyuK1q/EVB93V38bwHpHx0MqMKs3WB7Ir5+hh8Z81VzghqbQAlIgHY5C7cLU15ck+jeUEiIAsZ7GZqrHAV6ftDFpSq1gMifTuwLK6+Yy15TDeTame0zmGnEitiiciWyZKYbB+ETJpij28cmMpaY+E+Xrcun7TQMjbWshuSR+4QpLH7Wy57j0pcWyi9XldKY1ZAeU5HYb5cWo/6Sz09eWJXxF/jnjwBKycMWBmeTn+wlHXp9+ZgoatGTbF6hB2iHy0o408quUsaMZ+c0zNKRxdNVXgw2RjVDHTKfTKd1C90iD9efWkyj0ObvQm+wRdK+q/Bz7IzubqBcdzjNv4fr9cnKAVQ4CKCU8LqgHo3WC+m/rRQUoUs8NVsw1sAXoY3o1nPNgSsPZrkAFjFeKupluIoaU03QavaICiMsO7JY9Y3LISQ9a6kFtcl9EHrzjLTn97GnyJuo5bzaqGkmDj4sURD8+82V8wNv73HnOThrJ+xSfBxcsVu085hV1TjRNrkAH103BigcKVhxYJMy0N5wdmVWKpvY7Ojo6IVrK1FGvmH2P5lxJhx9BvxbWAslngSxQU0dv5ARxqR+ZLx/aMWOsbfbsX8kXBpX+BaHIf01YbJs85Y8HDWgeY4vjyHdvxG2NQg1RyNyl+ciAoqO3u66eyF8KMrPWygmqPXUhClzQCI6J3QXFPsfB+kSf2qAR4ghdgjq1AeWjQQNTg5gGUqau9Ri3G/TpSPZ0pCkyJpJNvfbp2ApmaqbGolw1JlasaYjhBObIGle6PifLN+BZkwZsTdkjFvYCvjkwqai10yncBNldTiM9GGKRm64UW69EFEs7dKIdZy7SP1z34Dep374r4XP3J5LlqKPsnYzXZnj3oqH7vZW4+4ASsps1FJNaFI0o+nHh1KLEZkU/o6PJI4qGovuDmMQ0AZB+pSsXAWPFDV/c0uoKeBtilkMbcqnkZxzYVK3cEoclCNB8oI936KKzMlIz62ItudxsN49Noz1S6EEq/7at+Urz9ZafP0TffeH9Hv2Wv9nuPdkcW1v8TB4kSMWKpd/MEvWQ93wIHp+PJg4vORVQAghiqr+XI+gcomCF2BBNBBmsZkUDr2lExXqmghNl6mdVt8LntDhZUwwtoeLXv9lewdQhlM/Qwowgm6cisBOiFLPWmZIF9AbOFGGpkBR6YVXwdqOdXsypFnOKHIFXkV8O9J30I/07U0n/Tl2RpNE3yKWdFvx8jpqzgV7QUFI9XZ2+gV68H2NkQoFDfN31v6HWygnDVahTV9Rz/9o+cTsVay2DuAUAgQkSwt02O/O5HGDmtUMsK2nALNywAHWrcfUDpHhwyWpP4RbskZDxE4+UG0tWkLtHL3+ClBhvMi6PJT99cPECikST464A5hoq8SqUaJgspiLEhKmB1yizNJwiCJzB15jhUHhQNKP06wZs48/a6bMmdmpDxF63gu+jteBjalTbDa6KHDx9jf7hul8jC/ntn9TE9iEH0fObtu8uJJQVTb5D1pKlxfjO91f//AAtRfFvLJ9XjADBblwgfSMxD7yeLk/pYBAc8mM1f8MovrigiHe6GYkGww8MydHFVJpjd6it3FfGmTVR1cMg5sL4rvhgn21dJ88b3nPYO6Ctp/Qe739SF15VA7RePwFs/v9THxSepXosG4WL0v/fDiksQ1u+b9+1k1P3Refnzhr/0Ue4W1kZ7ZQy/HB5682JEyeOKKximV7ez0X6is7HAcN1QGeUWOIu7l/iMC3+rXCNgoNsYCZJqyLXhuZ6iJxTprzUYm7Pyw8eePbtQ2cOjkFNPcoo242JdGx0qH9461jr3xsBINgir0TrDK0gAELoGLVTJgTiTSe2kjwDDK36j8pZsqDXW8AYpfTwg2QHA6ToyE8O/xaSsoIeoZKWYsZdFWmknESKoD0A3ifFPJ4b7vBPotgFbrjNHsa5kGG2x1PE2Zf+99zwxzLDq3/CG+no4iFXHJb46xoaJXwu6+Z1ZD6sgq0gZfozwMFYwwDHIgPcj/qtRsazLMz/CQMcXf03DHDM/HZ8XLI/8osajn/zixr4Mb+oEWzw/0UNKkSxbkQjDrMR9504sZgsNaA528jCT8yo6YI9e8ZiA3Gg2PqAoJBanmAp7om/dyMFexfiuczeSFAit8VTDNNA4h07pold/msgsgxjH+NIYw6DyHhXtSMZuA8eiSWfKWpr1nj6GdAHRgJj8AcIqGEo9QCMeiZVXaOelG90GUVk7+FJQgdP3pu2YHTXjqOyO3cdPTCpgYsDfIZpx/7SOXtEty7DKcaX2LJBfGJydXXNr/xgA5g5UtQQQP4r589Gwtj/7hdsrsmIcjrYYYuMcnXrxmpoQeh1pviltErr+8ycvuk3baDHiJ6s6ze1dpe2b9e1/u5C/nbl41/QV7c/RRF4YxGeV9sDHG8kErL8lsl6gJPo/7fmgoD+SawHU12YANTREvJtgv8hMpESmD8Wzg52E8dM7EIAjypUbKpp8xoioER1tJ6kYj8bzcDTABTPJQ+EdlF793pQXfkGuS80jZJvFBUV6bqihkNPHSfmkU6R4UGYh3JiX0fOgzIwT0To7FTh4wrxBU/hfaOlvQ9O377NmqeSZg+ktKorUloR6lhSQk4Aqv6R9vuYqrSFSJguNEvQ7eBibw8haEM+DF8FBWXqx2EWFi6A+0yKj3jH3F/0/zV2FeBx3Ep4dN7TnYOGMzc5s8PwHEOYmZMyM1zytYFXZmbm1hSnjD6XufUXfFRmZmau69snjeRZ7WkLHyS2/N9/o9nRrDSSZpRhYA6QvIA8IHW9uUA+/bQ3G8hrr+l8IA9fnerUwQ+25OqHL2bcdVUlhci4ULW0bxaBWWwMq4eYP9lvsl9UFKcMQB/JniA0jYZkfx+6ntBNsD2AeyA30eWEbofNbILFPcAx0Lyb0An4VXAXpHFnOz90lMj4KfFfSp9oY8vYdOsTA/gPaKzeJ65Qn4AIiGt1rFy0H52aJSsoiPYabD+WPef+LNqxTkBkmmgfqnQJ3WwGxMx7A6QdG30kOy8APcCHnkHoJrgiAJ3FTXSE0AnYJNAFaegcTzvuOwJ3KkozUsnu3kz8FMNKhrU0HQCh5Qb6SKgjNF2PSXKFdj8VaJRdo5vcaQHcUa7QLwn0PpEIoRPuGk92QvcRsseU7CprOlrOP7TldLMJtt615WCuc7TKWm3xK1ijRtNBimRZNBh9JHs3AF3uQzcSugk+D0JzE11J6Hb4mE2y0BWm3LyH0AlWIrgL0tA1Qi9jtF4w0zOO1vG6p8Np/JHPTMZQdht9JHuY0HSoIZnnQ9cTugk2BXAXcAPNuwmdgB+80UroIiF7hZYdsw2jNJO1NOcQP6VESPbV0mAe2XBKoGfrkfcigEbT4f7ksEwLrbkPDEAPN9EcNJpD0+EBWGYyf0HY9oRjYUf4sJtJigS0AEBBGnoM+6FjvNQJSbIHfaINfoS+1idGCC3W
+z6xD34CPZho/FK075maJXO5iva52oNNRQ+GGUhRM/O1HjeTZuiAbjKOmrHRR7IdA9ClJpoDolGPewdgmcm8mZgTcBHpxkNXCd2M0v5LppQ6JCxHxwXIPutC1+dhJD6sJbkKINRgYI8scX2+S2K5wrpPC6zYl1dY9F3Vrs0cZQr9qEDPDm8idMLdWaAL0tB9GfkulUEQLWaFspj9HEuWPMWu8vqhvlfqpyOk871PJXpQZjD6SLZ3AHqwieaAaHw6hwZgfXJ8Qdj2Ax0LG/dhN5MUCbjGe5KErhAaGaE1glnKUO7ddC+3ktx07zaZg3Lb6CPZzoSmNVQy10RzQDT2cl+bGbVNzJuJOQGXeJITulBIXqYlxzxaKMteWpYSAJ/PIskJvVmjOSR2Ina8ByCxBYK91JyN8K9o/rIGtrIpkJtWlqHfG8bIDz9InmjN6ihizctOwzQWmSMDiLkFfmANFnN/H/MrihnR1wKzuIcLNFbqSi3FSl35UASHBGx10L4h6chXYkUe84lkmPPm7GfkxUpxik/X1co1bqPkx3oLIvoPATXgDUrxT+ib0Mhq7zjQrWerQl8bRY0vWd+LDgddspqtlyW/fk+EbsU85amlmKd8JDTAJX+Wmpz2Ant/GSp+GZqD+6JqJdAZcgr+RsLyoSKNYYZ5tHGUL315rZm46M/Tl6fposbLZl45MBKUzbzMU9A5Oq95pHp2UGJzT1/f6BTnrqvqi0V2UrNjHAVb2C4Q8+/3JOP6zY1ZxXHMzNXoWhozahVK7xDi3oW4m+CZIG5ucHNAbhztkwOYmclcRMyt7K4A5grHlLoLmRW6JEDqShYsdTN8xHa1uMv+QOrmlcxiLtfMWCMNZ9ZDNHMrm2nNkko0s9h7DA/nIaiGeYh+KuOFcK74ufMbmfIrHpdxCvGP/GntvU/H346H1na+Lf+EKcGWitbOp8Xf710a3ycu4vv7Suw7olX+s5e37uC/0bpjDVzGFkCuMRMnT0Jv+QdpRrBmT/JRdBkojljNHCkm5hZ4gs20mAf6mF9BZoU+F5jFXebjdoi7la0LWFvlOubcpAu5FXoSPntrboJVN29NLcXacSVwlOX99Gl0XzbgHOsKtDpsWaxDiFR0NeTLrtfH8xX5XvJeqjGX7g99Nefme+P9+p69jPpzNLzPOwxL0eENgdShmKO+CkbCcWCfEMFXruwErRrwLgIec46SkJ3DcvAE9DBxGXbY08OEMQ32upNjnk3vrFLIYv8N7yoeqU3rU7Wdxr43iX3Gh3PXM6+X+7+W+tGX0j7VpRPaP3Z4PXV69e4OK/u6zExvH9qgktsHrMeb4TY207KZbB48923+J0u3GBrTWIEPvcVw7eO22Z6I1pCYwR6ZFyoftxNY88caH/NoYm6B79mukOtn7ijXowKZcQwt1OhTaAwRd0eNRBN3EXG3spsCpK5xDKlxDC3U6Fqw5R7RK3ePK2sSKm4QfottTLVR3y8nlk1sOOzql1DPcihKgE9shNbrtzTKqdYMRVBwXh6ZLtCLNHoQmw6ZICYfHTHF6D4AEDouMooiFe3uJDbHioJEVJ/dZoHeN/yZWhsguhxCVp8jTKHvF+hT+G/EvcadQp7UO1MU1pI0CfTB4fuRW6ErgfvQhQb6C4GeGSkm7hZ3FZtpcUc0+jmBHhp+GbkVejmAxa3RUJjalR0T7lDcwGHDR5mCozu1lB2KT3Cxat0usbcJvjMjDsnRCoMC4kJ9tc08IN5evwpPimhZESs0EiTLhWIevQArfy3G9iXsW2yvExZ5WqROsI9ST5CdwOo0O11iTMY4sstbB6HxaO3XK7Rb675irSNytCy39rjhMPZytLbIK9AiLxSW2g9H41Ldno3tG2TtQhx5Y3S8rJqNtWKbUT0nktfnx2HccZlGF7KrfJYyGFeoJIusi4jc6jtX43fu0uPKPP3Igu1uN7arOopJLYvEv+h0QZY/FoPM0qru5CFABkTuHM4VP3fGo3KqIP65Nx4dHRWzhLujYsYwOjpVlI7ufDvK1t2/T/SI6MnRjHX3Ph19WwKWRuXkQX5iaXSfqJw8SIpvBJTmDWYfWtmjPZu1BG0clATY3thzP43lcRTxO5L9yOp9HpWi1rTGTuEaW6H3CPA2MU+fsgaj4kZ9PoN6u6DHlbn+FQu212K7kqWeZGlmeazBehMMNP0KB1rvNx/PLEnyKZogsQ7J/ZS7bzgPuNyxMSKC31BEcA18yqZBri8iqGc5tBJ/kFbtaw6m2RZt/QzSWGSOZBFzC8tn4y3mch/zK8iMaGHBzOKO+7gbiHsjWxUQx6yO/iBut5n8LvFvhE8CYgjlmT90DNafwCqGaB/1+omfErDzUOzZR+g5tI+dFRruB/C9uyR/lraPW3pcWSFRcaMdHIB2sLLHlfn0kQXb3Z+xXclST7I0QxtrsGQZpO3jACHLfzkgC9rHy8ySJIcpLNY8ROYG3csLWaNleUN1LzHrPvZyF41eTr3UqfclOtPkbiTuJrg6iJsb3ByQG2chewQwM82cWiwrNSKzij22AkiO1GxZFUBxYPte7i8S3+MSXun7SNTrPj0u4Wk8BkjeDHey8Zbkw/9A8ua1LF1yiu6OFZJcjU++UX/jwfiNmT2uzP0v2ndV7bAZ28eKnhIee3QJgMSnFoeuNfDHwtfYjvua+DwbteTtAZ6kv5IcKw58wY8F+lZ2Zfg8isyXU6y9HZ5kE6w4fr5jRrm+oIhY+56O9daLMTOK/xUxr4EuikARc0euHOfE/CAxr9mb/A1lz8uRWJJ5ADG3wNdeBIp2d/N9zK8gs0KfD8zijvm4LyXuNraQTbf2HvI5RdoUP9+D+NvgY+hrRf5ijvY39B119B0b2Szc37D2TjqKvO9w+oVd+o6N8A76NCtuiZfL8H5h6nis21kKK8E7GbZD0LqLMjYVysQsnU6uPHnjX4F15KbV7s3mPG1BZRX3PO/063uXUEvzzSqfZVe8N3HdvmrZtN9KZt1BFdGzj5wJdK7wT9ItxcUv8az05eMf3PrTacfFBn9WDta4yfHfwy5L61Da1dTsjOe8NeFNxv1UWgJenDjIV7bCdVVlURyjE/WscjOrT5/z074X1qBA77KHRleSz6XcNMmBTKFxzwu5Jys0XBa058WN+DEHih83VREzxY9jJjPvJuYEdJF9evOlLIfsU1XjxDfoFP22OJtkodUSzbCwbgO+W/bW6LKAmH0/fLdobv4LcbeyIwK4sx2Tuwu5FTozgDubGdyReuJuhptZg8U9kBvcHJAbvf90ZjHrp6NyAeKe96mqj6HtdpSI9kcx8xiO77M0+jhAbtPkk9O0RjBLXuQkgT5d6+9Tdoov6ie5R2huzOyE2j5XoxusnR16k2uLHUcWOys0IsBiY1HDYpF7D4Vm5wfMhQbY3LqXjwTMs/Jsbo0uDhoNJjfvJu4EzvEL0uQu9vaMNf9m4k/gfmSBT3YcEx2D/mCXeRb8GrCO6IPyW/s7An0B2GMuO9NbUU41VpTN7nz3VXtnyovk8hUoyVitm2tZvbUWztaSYDU1lGS5Rt9pr2goar5
DapXcg6FzLDewkwF3clKr5K4G7Q7fAFsBtZJqdx5B/GRsv8l5BAD7H5Z1YrD/2B7ewT2AtPgwafFG5wE2x9JipqlFfgayKPQCyLK0mOXzieXE3Q4XsQmWT+znmE/oC/KJ7WWOD0saV5VCnTu4tI9yOBk6YkYO6T+vATQwJk/1yX9yM2I62U6W7xScw/tjGcj+HP+MlxW474Bf/7Qq7xW95UPrsL4XlmOozatlXnUv545HVSVRWVQ09SuLPPTo76t7i4o6z3WPwnKiA2RxUcbFObnfb9GVRdXc+r/YV4z8Qw1sZxtCc1kEZkKreyBEoXP0YB3BzwFwRuOzH4bPeLt7eupktKGlPhvawE7QNrTUZ0MbYBO235razZmD+KEaPwH6yEiowH+P+Pm6nQP8H+dLiG0AeAFVyIlBAzEUA1EjafSd9F8ApbIGcr3Zw/Ja6+t6vm/3rCXJZSo7SApPEpDdC7SinPG3dkFRYg6DhDaArzJJLFdQ1LOZGNtEcjIz2RQ2QAUqt626tEoiK/ZSR5J9xMzc9zDQItDftdSC+w9Alz7xTheekvJReeozPUxQQQjjcqJ/+cSLT+XVHgI57X3miegMwgkKrPUDInsISgAAAAEAAAACAADiktOWXw889QAbCAAAAAAAxPARLgAAAADQ206a+hv91QkwCHMAAAAJAAIAAAAAAAB4AWNgZGBgz/nHw8DA6flL+p8XpwFQBAUwzgEAcBwFBXgBjZQDsCXJEoa/qsrq897atu2xbdu2bXum79iztm3btm3bu72ZEbcjTow74o+vXZWZf2ZI6U3p4f4Ck9+V8/0S5ss3jJOpDI1vM0D+oI/rQz9/N3P84xwTRnKQLKCpW87BvgxH+wNZGhqzh74/SnWlqouqq6qMar1qtqqJariqt/ueue4GjpfdqS+9WSunMDc8RqPCqQyM5fXff3FFLMO4WI0rJFUN1utRTIw3c4U/mdtkIGWi6P2mXJH8rc9uVk1nbNwJ4xDd++VyH83lUU6Pp5HGfTmosD9VolBBnmVXeZK2/lCWh/ocp/x/aE/1cDbiJ+jzjvr9FFI5jc4yi25ShS7+MSrrve7Sn9T9QIn7IrtPdlH+wNmFwCIZqO8vpZPYdynd/C3Kw5Tn8H8ZwPzwPocngRPDbxwfnmAfZXt9p7r7ieuUe8YRzNLzRdJdc30pneLNytc51H3FCvmcjrq/vkkDOoUVrAgP0FeGMi1pqPevZLz/h5lSlx7+O2qqqvqZTJL5rA9fUMvvwwqt6Wi9PzFcpLqfvlrPNkkZmicVGKZ7qV2YmP0otelg+ZM7uVQeZFHyAE3leqbKMurpvzrJ2ayK6znY/ckGGcV6acYR/niOiIu4UJ8vK1xA/0Jteri/OT/O03zdkX0cp9JHlmssS0nlJ+b7kN0cHuaKUEIaBjLD8uivYYI/gTPCo0zyf9PVd2Qq/NPVffdP+VidC5NqLHXr6K46za3hKP8y/f1bVPYP6PmNLPR9GazqoLFV0hjLWu6SNhyaLOWy/43l8kIvKiQnkspUusU3OVSO4AQZzWGxPl1iM71ezuU+aJ2H6vkiKrt/OM9ylefS/hlWs0RrdK71hnk9dlGpZC6Yv/w52c/m2S1KfWweLpY/OXtffXy98gvVq7l/N5Z5t1jmXfPnFmWeVb8Wy/2ZPap1W618TnV37tWNZT4tlvnUZDHYvzemxWXrbZHau3F/ulm8to9t0frbemyL1BxZ/2m+btM4zlHeqjxb+bXyRc3nfu6H7C/llckabgtvUmJzwnxns8L6VZpygfpuhfIKZTujn8fZYnyGs20Ny8/GlIHZ3VYPy9PGtFlj/V7KVqXsZfPHZsA2aR6yOVHMR/i/1dvqsL20+WYzxjxidcvnnM2ajWk9bz1uMVh/599uzPxflkObszbr8vrnzzbhBRqTaTB75O/mNf4PGySVPAB4ATzBAxBbWQAAwNi2bfw4ebyr7UFt27ZtY1Dbtm3btu1Rd1ksVsN/J7O2sAF7GQdxTnIecBVcwG3NncBdzT3IfcT9ySvH68E7zCf8/vzbgv8ErQW3haWEtYUdhOOFm4QXRRnRJbFe3EV8RCKXVJQMljyXxqVlpL2lZ6QfZMVk/WTn5Q75YPltRTlFF8UmxSMlVk5Q7lF+UdlUGVUNVX/VLNU2dVo9QX1fU1SzRPNN20W7VftWR3VTdKv1Fn1T/XqD0dDDsNHoNHY0bjE+MeVNfU37TN/M2FzNPMl81SKztLBcs1LrHOt2WwPbeHvOPt++2n7CMcQxy3HJaXa2dD5w8VwVXT1dM1zn3Xx3ZXdtd1f3ePdSj8TT1rPcG/D28j7zLfEb/S38VwMgMC2wNsgOlg+OCF4NZUObw1XDg8KPI5UiW6KmaOvogei7mCtWItY+Ni52OPY9/n+8U3xN/H78NyNmtEyBqc30ZUYyU5mTzJuELBFOkESVxJVk1xQvpUqdSWfSqzMVMquyweyA7LMcPxfKTcjdy/3IB/Pd8g8LwQItzPt7GVCBbuAiNMLecBJcCvfAy/ANEiM9ciOAKqNmqD+ahlaiA+gm+oCl2IMhroJb4gF4Ol6FD+Nb+COREQ8BpCppRbqRQWQmWUMOkdvkI5VSD8W0Kv1TEDzACAEFAADNNWTbtvltZHPItm3btm3btn22hjPeGwbmgs3gJHgEfoIEmA9Whq1gJzgUzoab4ElUAB1CN9EHFI4ycQlcH3PcB4/HB/B1/BaH4HRSjNQlG2lJ2oBy2peOp8voXnqFvqbfaRzLy0qzRkyxAWwyW8UOsjPsOnvHfrEwlslL8Cq8ARe8Hx/GJ/Hl/A5/wb/waJFLFBLlRFNhRG8xTiwRu8Ul8VqEiHRZTFaS9SSTveU4uVTukZfkPflKfpNBMlUVVuVVbdVcEdVLDVIz1Xp1TN1Rn1WUzq0r6Ja6kz5tipo6hpheZoxZavaYy+aVCTQptpCtaaHtbkfZhXaHPW+f2f82xRV2tRxyPdxoN90tduvdbnfJvXQBLsmP8Qv9Wr/TH/UX/d0sCRMZsgAAAAABAAABnACPABYAVAAFAAEAAAAAAA4AAAIAAhQABgABeAFdjjN7AwAYhN/a3evuZTAlW2x7im3+/VyM5zPvgCtynHFyfsMJ97DOT3lUtcrP9vrne/kF3zyv80teca3zRxIUidGT7zGWxahQY0KbAkNSVORHNDTp8omRX/4lBok8VtRbZuaDLz9Hf+qMJX0s/ElmS/nVpC8raVpR1WNITdM2DfUqdBlRkf0RwIsdJyHi8j8rFnNKFSE1AAAAeAFjYGYAg/9ZDCkMWAAAKh8B0QB4AdvAo72BQZthEyMfkzbjJn5GILmd38pAVVqAgUObYTujh7WeogiQuZ0pwsNCA8xiDnI2URUDsVjifG20JUEsVjMdJUl+EIutMNbNSBrEYp9YHmOlDGJx1KUHWEqBWJwhrmZq4iAWV1mCt5ksiMXdnOIHUcdzc1NXsg2IxSsiyMvJBmLx2RipywiCHLNJgIsd6FgF19pMCZdNBkKMxZs2iACJABHGkk0NIKJAhLF0E78MUCxfhrEUAOkaMm8AAA
A=) format('woff'); +} + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: bold; + src: + local('Roboto Medium'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEbcABAAAAAAfQwAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHUE9TAAABbAAABOQAAAv2MtQEeUdTVUIAAAZQAAAAQQAAAFCyIrRQT1MvMgAABpQAAABXAAAAYLorAUBjbWFwAAAG7AAAAI8AAADEj/6wZGN2dCAAAAd8AAAAMAAAADAX3wLxZnBnbQAAB6wAAAE/AAABvC/mTqtnYXNwAAAI7AAAAAwAAAAMAAgAE2dseWYAAAj4AAA2eQAAYlxNsqlBaGVhZAAAP3QAAAA0AAAANve2KKdoaGVhAAA/qAAAAB8AAAAkDRcHFmhtdHgAAD/IAAACPAAAA3CPSUvWbG9jYQAAQgQAAAG6AAABusPVqwRtYXhwAABDwAAAACAAAAAgAwkC3m5hbWUAAEPgAAAAtAAAAU4XNjG1cG9zdAAARJQAAAF3AAACF7VLITZwcmVwAABGDAAAAM8AAAEuQJ9pDngBpJUDrCVbE0ZX9znX1ti2bdu2bU/w89nm1di2bdu2jXjqfWO7V1ajUru2Otk4QCD5qIRbqUqtRoT2aj+oDynwApjhwNN34fbsPKAPobrrDjggvbggAz21cOiHFyjoKeIpwkH3sHvRve4pxWVnojPdve7MdZY7e53zrq+bzL3r5nDzuTXcfm6iJ587Wa5U/lMuekp5hHv9Ge568okijyiFQ0F8CCSITGQhK9nITh7yUkDxQhSmKMUpQSlKU4bq1KExzWlBK9rwCZ/yGZ/zBV/yNd/wLd/xM7/yG7/zB3+SyFKWs4GNbGYLh/BSnBhKkI5SJCVR5iXs3j4iZGqZyX6nKNFUsq1UsSNUldVkDdnADtNIz8Z2mmZ2geZ2llbyE7X5VH4mP5dfyC/lCNUYKUfJ0XKMHCvHq8YEOVFOkpPlLNWeLefIuXKeXKg+FsnFcolcqr6Wy1XK36SxbpUOLWzxg/tsXJoSxlcWgw9FlVPcTlLCLlHKtpAovYruU/SyIptJlH6ay0K13Upva8e/rYNal2OcjWGB/Y2XYGIoR6SyjtOOaBQhXJEQRS4qEvag51P4ktuuUEzGyjgZLxNkAD4kI1AGk1Ets6lVSjaQjI1ys9wig6iicVaV1WQN2UiOlxPkRDlJTparpIfqRNGUGFpIH8IsgQiZWm6SW6VGpMxiMlbGyXiZID1ksBk0tasa+REcgrWbjua9k1ACbC+aMyG2RGONorqd1Ey3KvsMmr9WKUGrtEHZP2iV5miVZrPN5uFQXa21FgShu/bK9V7HCz4/+M4nBcnA9ltfW25z7ZKNs3G89bp3io+47JSdtbHvkX+Ct+dcfK7+Bdtpf+h+/o1trsvLQPQzsat2+pW5F3jvS5U0lhdi522PtbA9L6zn5efGkM/y3LsGAHbD/g22Tyv213N1GtoduwmSRzWG2go7BIS/cix/ameH20SbZFOJQFgyAFto4y3STgLhds2m2LIn+dtsB9i2JxWyA9hJ9fuNXeLF+uvtiB0DCWES6wxgl+WMN6zPWQDCnu6j/sUmGs+LuV1spo2wdRZrE4gkiiiLfNTvJRtgJ9RHpMZ/WqP4FIBQVAv5Qp3L2hFe3GM7/qa/5BWxg2/Iv/NsW7UG7Bzvdb0p326+Inb0PesfeLf56q+7BkDEK/LaAQBJXldHI9X96Q6+dVSX3m8mGhvy7ZdDbXSCE0YEqcn86BTP/eQUL0oxdIZTEp3iVKIyVahGTepRnwY0RCc6LWlF61ee4rHEEU8CiYxgJKMYzRjGMp4JTGQSk5nJLGYzh7nMYynLHp34m9CZz1YO4ZKfMOEQIRxSC4fMwiWL8JBVeMkmfMgtfMkj/Mgr/CkgvBQUARQVgRQTvhQXQZQQwZQUIZQSoZQWYVQS4VQWEVQRkVQTUdQU0WjmujcQMTQUETQWSWguktJSJKOVSEprkZyvhYdv+A4ffhZefuVP3WPRaUeiCGUEYwlnvIhkApOJYqaIZhbziGGpSMoyEcFykZRNwmGrcDgkfHDkP4WQhQ3EQBDE9pmZ+m/pK4ovGh2DLW8Y/0wRrZ3sTlWy/Ut6kPnlj7St3vzVJ3/zxZ878t9iVrSeNZdng1ty+3Z0tRvzw/zamDuNWXr9V2Q8vEZPedSbe/UNmH3D1uu4Sr5k7uHPvuMCT5oZE7a0fYJ4AWNgZGBg4GKQY9BhYHRx8wlh4GBgYQCC///BMow5memJQDEGCA8oxwKmOYBYCESDxa4xMDH4MDACoScANIcG1QAAAHgBY2BmWcj4hYGVgYF1FqsxAwOjPIRmvsiQxsTAwADEUPCAgel9AINCNJCpAOK75+enAyne/385kv5eZWDgSGLSVmBgnO/PyMDAYsW6gUEBCJkA3C8QGAB4AWNgYGACYmYgFgGSjGCahWEDkNZgUACyOBh4GeoYTjCcZPjPaMgYzHSM6RbTHQURBSkFOQUlBSsFF4UShTVKQv//A3XwAnUsAKo8BVQZBFUprCChIANUaYlQ+f/r/8f/DzEI/T/4f8L/gr///r7+++rBlgcbH2x4sPbB9Ad9D+IfaNw7DHQLkQAAN6c0ewAAKgDDAJIAmACHAGgAjACqAAAAFf5gABUEOgAVBbAAFQSNABADIQALBhgAFQAAAAB4AV2OBc4bMRCF7f4UlCoohmyFE1sRQ0WB3ZTbcDxlJlEPUOaGzvJWuBHmODlEaaFsGJ5PD0ydR7RnHM5X5PLv7/Eu40R3bt7Q4EoI+7EFfkvjkAKvSY0dJbrYKXYHJk9iJmZn781EVzy6fQ+7xcB7jfszagiwoXns2ZGRaFLqd3if6JTGro/ZDTAz8gBPAkDgg1Ljq8aeOi+wU+qZvsErK4WmRSkphY1Nz2BjpSSRxv5vjZ5//vh4qPZAYb+mEQkJQ4NmCoxmszDLS7yazVKzPP3ON//mLmf/F5p/F7BTtF3+qhd0XuVlyi/kZV56CsnSiKrzQ2N7EiVpxBSO2hpxhWOeSyinzD+J2dCsm2yX3XUj7NPIrNnRne1TSiHvwcUn9zD7XSMPkVRofnIFu2KcY8xKrdmxna1F+gexEIitAAABAAIACAAC//8AD3gBfFcFfBu5sx5pyWkuyW5iO0md15yzzboUqilQZmZmTCllZpcZjvnKTGs3x8x851duj5mZIcob2fGL3T/499uJZyWP5ht9+kYBCncDkB2SCQIoUAImdB5m0iJHkKa2GR5xRHRECzqy2aD5sCuOd4aHiEy19DKTFBWXEF1za7rXTXb8jB/ytfDCX/2+AsC4HcRUOkRuCCIkQUE0roChBGtdXAs6Fu4IqkljoU0ljDEVDBo1WZVzLpE2aCTlT3oD+xYNj90KQLwTc3ZALmyMxk7BcCmYcz0AzDmUnBLJNLm
oum1y32Q6OqTQZP5CKQqKAl/UecXxy3CThM1kNWipf4OumRo2U1RTDZupqpkeNi2qmRs2bWFTUc2csGkPm0Q1s8MmVU0HT1oX9Azd64w8bsHNH5seedBm6PTEh72O9PqcSOU/E63PkT4f9DnaJ/xd+bt/9zqy+MPyD8ndrJLcfT8p20P2snH82cNeup9V0lJSBvghMLm2QDTke6AFTIsiTkKQSTHEeejkccTZeUkcYLYaFEg9nCTVvCHMrcptMCNuKI/j4tbFbbBZ/RCC8hguw/B6fH6v22a323SPoefJNqs9Ex2rrNh0r2H4/W6r3d3SJ7hnrz1//tVTe08889OcCZWVM7adf/Pcg3vOfi7Sb7ZNnb2MrBg8p7Dba2cOX7Jee6fhjy+tvHnmqCFVJb1ePn3qzYznns1497K0c1kVAEgwqfZraYv0AqSAA5qCHypgEZilRWZ5UT2PYsgNdAxLlEcNYjwKajQGgw8Es+JcAwHH5qETLIgby1WDHhpXgAyPz93SbkOsep7hjeL0eqNVIP9lTHKRzEmHdu0+dGjn7sPHunfq0LV7h47daMbhnXWvenbo0ql7x47dmLCSvrRSvDNw6uSa3oETJwLthg9r37v9iBHt/3lj9amTgT5rTpwMtBsxtGOfdiNGtPujmzivGwjQpvZr8WesjxPZUAYhMK1F/0qJXHRyLXWOAx0H50dxboQfxapphKtHGVUGHf1gc6PC6GkIo0NCsYGDIdUo5n9yHFb8Uz0qpyqHT8qpyOmZI4w2c1RTC1d7tc4anqdBGhkdmshNVo7GA2MF8+opFMrXcvAt55yfJNbVj8SKVhCJpBCfz+vGL5mK0yVjQRtLLX1+osicbALyzY/jkdK22by5e7c3z+x5acqYSaSkScEL3Xs8T9l3/Qc8NvUqY+SjNsv87OFG3YpXpZYUzytzDe7coy/ZsiQ4Yuzd/U688NSmCXd17sZub3v7oC2fjfhCGltW8VnjxjpZZy+dWjwpIJwormzTK79/iW/wBAAgqGEiyZKzQISGiQpWr1h4SISYUkm57FNqBQIBVkr3y8NAQ+3D36A4IWQV/JmZqJw2NT1T0Q3QAqTsQblg41NPbiqQH2Iv035kK206mGysZG3YMSs7xtrMDAyhTcjWSC4axqy4LiZRQdFdvnTNq1KX320HjVawZx6SCzc8/UKgUH6QtKPt2PKac4MDleRlMsxKBpFXpq4ZVBNmKyIxHbSvMAF1NBWyAQPW6z3nEIpfMhe2fL8kuIX8TClDEQQX6cwueUmTlNNpRPey/31uR/D0LuH14ccWkqFs//wTw9hv00gu+7IyEr8T3Cw2Ex+EZHAAktOEiPrIJO5s8hWcNqema06vU3PT02QFW/8NW0tWfSM432N9SfA9chuP5WOfkxnwHUgggyki+HwUXGw8M+65u8v3uexl0v7FyJpdaRIdRN8AAdJ5nYKQIGi4CB1U8zNNoUnPR3X1LjTb4EsQYnsMWACwJO6xk7e4bT/99GX0N7R2ndAo0jMzAOfHN02cnKkT94fv09bvr5QLAD8UpuJ51ev0rCK6SgOc3gCn19OKL9lADWokUbkS0ldBzwNNU8HdEjRXVGu0qPKIei288y5jBN59h9Cfl8yfv3jp/PmLaAn7hF0izUgO6U0cpAW7wD7NP3vy5Fk2o/rUyQeieM4C0DcRjwS+aHYSJiRhdokFkVRTjNUkvr1gffj25dM3f2ZXqEN85awnGncAgOhB3A1hQDSuhqG06+MGs+MEg0I21x4BImqiqcGk+kF0sY1xoc8M45pOL4mpgk13GVCnJSTTKXr+KSPXFgybNz6w4msqEctn537ZcSt7XKC7j1Bp9YE+E9bvXiU/S5K+eGzlJwfYcRkI9MM9smOuzWDV/+9pGmaYlnq9hLYFMjf0Fje13Izl5ntACdyDxkxTg0pcymnYlcImJDTWkK0ZcHQO3nrRBvWETcbdrEfVuA6VHa2IuhjrtnyGTjYeWzR1zsyJK7+iMpFevcjmTVuxkH176VX2rUy/Wls1d+3ilceELgtnTJs/d5R85OMrL40+Xdyiev7Ln15+Uh6/ZNmc5Qsj/CwFEIfj/jeANOgFJknoJonXwOrVZBeho02iBmkcTDlsEq4XIUsyjQo+3p84FpvOj7aLuIlTcynCvocf/qlml0xn/1WziWySrVR5nj1BOt4mXPlnKO1Lm0d5sxb3wsB8cmFylDcEVyexVFLRSeV8JAmXnJAllfClLUX8xpYRRhu0x6VoUYM5CS4WP7Qol4xGbc5ACRJ8Pr8v3WalWOW2FIsc2wbl3kECqXmlRfO5Xd/44pfPn2a/S/TjFRPnLl42d9J4O90m5J9jt9zYlFL2x6eX2A/nn5Us0xftWbf+UPvWQGEBYukSOQMu6B+nMDE0VnSsHA0kECeUCrz7ItigIy5ra0J7xQK3tGcqRoQsNh92U8w/JhEZmLktBoMe7bO7rLB0epebg632jH3uY/bP+ffYx6T9mVGBvNsWTF8WkF5wOh7Pcnz4lOJvxb4//z77iJSSLGJH3RhW06N96dRHXn5ww7qD0f3pDCC6cX9ugKIoomQEkXw9VczkxNMLnBCUCoruT0/3oxKL7r/NJmk/p7m+evWfGuE78Vt2lRns9N13kx40+4fnAD8CjMf6NcP6ZYKOq42NrmfDJWy4Xj1P+cEsSLLxkhUklCwkOAq4oqQVOOpuIs64nGxq0JVQz7ij5o27pAixmy+WM/67KC2ZsngH++XyNfbLtqVTF/36ykt/vrFletWG9bNnbDTmjRwzc/aYUbPF4lnHCwofXvLa5cuvLXm4qMWx2c+eP//PkRkbN1TNWrWa/j1u+eJJExcvjpzFAYg3s44vfRL+t0nkS3xjCynWFA5OSSRLynVkyecXVH67ol5PpINovJ8YLr/dnoHXLW8MFxXW7i3ZMSj8I0l96SOSyi5/3XNvxxtbB5aMDNy4dsmE9UtPPfNIx46difLpNfI/7DL7kp1g37C3GjV6NCeL/NStbO2ps2c2bD4CALW10f4qDgYDNPymcCtU8R4uYw/H8WnY1+/HcReOEKGKyJDmBj5OcRwItIUhwnqhFpJw9xFg6CkFlTYXTfVqZdf/tfIcAE0d79/dG2EECYYQQBQCAgoialiVLVpbFypuAUXFWRzUvVBcrQv3nv11zxCpv9pqh6DW0Up3ta4uW6uWCra1So7/3b3wfBfR//rVcsl7+ZL73nffffs7HTFBR5D3WpvCDmUdIQb1I01myQTjoQl2MRpRl/r3hG4oVpCF83Vw+kdwei2j93o4WagRrjD/Nw7YgU6IrsgAfQGRcYCTLxUZur5kPuL/lYuuNgU1XoSa+ueEfPon+J1yrD1J7UCC+5VG3BHBHVHcEcUdlSGKO3nPyzABMdyNFOv48MTEyEXCyPp9KK85NAqGGrz6I7y65gckiwz3dgAI+xivtAIDOA3LqyxbS9V3By2ZYgWxj1KxdrMPUEhIZKJWxzrtdWqXG6lJNABmTO6TO6EgZ/pvgvDn0c+vb5z6WEvxzh24q2xeXq9VAwomDR8q2098/X
7JuWGdhg3GY64xvHvgZPkLaR2wgixCI1vHWKJpbdGx3G7mDCO77O7d6Eeg+9T6IJEoXP9qW0dDeSvNbVsrcjvaUN5aC9pa0c2ZWrhMKvyhjOgmkGUyEsFkpRLVKsh0dyc2B5YQICBgIe/NBCIEGNktqHxMBISRCV+50v3qzz2L/GNX5i4ra+5/7cXJK/oKktUtLnpWmZsBf4zfwZ/i9d7NYU+YMLgiIyLr7Gi8AA/zaQ6/hPNgCdx2D3ukdEseEwlhjDkuaOZ8eO9b/PGA3n2za6oggAlxCaLjSGGvi6/CKXAHfhxvwhtxbhtLaVQsrIM2+DLywL6O+mUrO6a7GfRIcPf8hNHZAIBE7VQd8ASDAWfec3ESdiGTC5nSGsiiwiLUtMnjuEOk1kzFcI9JHoR5kz0Y+SwCsXdhGH0VKhzHp/+FzFeRz9+O7fCtL2Q4AL8u2e72RcFosiLP9wIgHmY+hxmEgGJg84/lVDxnGtpH+FMziw5T/GGx/Sx9V+NPbS1/uvSGcm/t5vGnTEK3rUG9y6yEYO1+tfpYOon3TSpILhmHhztfw/bCn2qhobiwdDW+fQN/CjstfKZ4Dj4A9dOWrFx2S7KdOD56V0TLD0s++Qptwe2eLpq+6O1Jo56aACCYSGT3GbIfW4Kuj9KLgIabbN50LDdy1C0P5CSL2U+190OAThfGG/zHkIjP1Tfgj2ByPUSwrYiu7925+a0D27bugj/KF/F1OBh6QhP0gEPxrZ/ljc/fsONrFTee28R4g67DL2Qd3IERJIOHLwGln4cGSUJdTxdyhgDi1AKL4NMYAdkLvyXzDscv4Os/X3r77Nm3JRt+Ef9xEdfgl8Wb97668d7lQzcAZDjMIDh4glxAaHWfDV1JZj/rSS1tOuz1hHmUcIAjHG+MklgeL6F9LCbnn+jtWIJ+rI8SzjpaowWoDFuPSrZKXAiAE5+ZjCY9wHwiifwfvmXsI9wJMhnuBBn3B5CRXWYPc85tcJTWCd84gtBCVOTYSOfNYvNOJnxzgfBNCMgDJG7zSAeR2NXUTWzOuYmcC5VObFq7NxloMKYVZwDIYliIk59EGoTQ8FMi1WHihc7472r8D34dZmIIYUsBXXXbuXHroZP7iteG4MvI91jOCtgbusEO5K+347Q8e+MPb+JPbT/Gt4ZtDjppKBnYmi4D3IJyT8WxGL/UbqKsmPH2vW7kQdLd4LSKMre9bogIAvLe7u0GiyvOul0mNypGuE2h989SwFg6lJAPH3RNyQJYyWiVDLWO6XV1aHWtQn/HIrSI4vwGGfYxf74lFwHn0WS/ZYX76uoIKFu35IbrwlVyYQCxLpa96kTTx3OvJq5zuRfv5Pnw7hyqq8P1Z75rABK6Pm/yyAWS7d6fZ34//7k8f/ry4ka6xjKbeygnyTXR9CbFOhNBTIUiJtZlQleZiHWo4RgPKCvqPoxRivhqEFpQ55fr6lbBkzDE8TtKxt+gmY6VhGRb0QTHkw6dul8oThJo+wjtwodgwulWsMINaHf91LqjZPMpvyPTOJQPmKOhI8f8PFG13EQvVGfduUdgdUUc7AqJkgqDxNrKgaMhs+eobTNFT+700efrUV5FO30KebG5Uc8EWtlONUbCMKgzknfwPPyXDJ+HyXX+Mu77L9xf9q8jy7JPHHm3L/wDzYL3tomF0LEaU3YHPO9P/D/xPpFcNlR9sDfKQ0VIyDvYAkWjZCRQzAmOFb5urd0QeRq30fSlk1sX8kKZEurossFEhcHnyoTDl8u1YiS69x3B9zwSWwMExpGYerP/TAzKwmQIe+FjUFIzXI7/xHfxIdgdStAT9q2tfHHfu+/uf+kjNJB8sB+OIDdl6AFH4n34L3Twt98O4jvvXP/tEFB10nkWhzCCLoBffFVBMRMFCoqJUu7Jo9qcQ5WQhel6UVXuFrihDj12C/rgmlv4Xfj4imeeWYHfRW0c30q2f05/8nfluilTqH6k9PKT+hJ6GYEFpCu4GMj0BlevUyth7YJ7K4qXwVBu5hBhkW1IDMiHUy53QO1z+HbC7IyHkG/FrwOur4fAz/Q/oGEDoWEgCAODHkFDdtGcXDTnCMq5zh4tAL0r8H4kpavGhqLpIBNRJVTz83QOvA09Zkyd91RIxN025kVT8WEYuGH50hX4HMp1PC/ZLpyZ9q+OkeWL52TMDTFb1nadMXVp5dSnJy9Q9tJwohNfko6pURM+HNWSXLSkiJtbsnyG2TXfxfFwS0N5+AN5LeLfk+CaalbRx3ANsgkVK167jf+BYVf/gGESurZtzbKynQeu38YXb/6EX5bQb+9sXLEFzhw+vX3GF6/ZfsL4bXnqqum5OZM7pl96/eA3tz6Xly0pAhAEAyCWMjs8lpcL/M4jdosEtVlJxXhgirkUP1GHnxBHE/PJKN6sVGi0nNDoFpObCZzc5HQCL2Jc1JAPCxfF+1idfOgj3sJVDXfxqbrX12+xS7b6DrXYAcVbQnV9h+07dmwXqum83gBIErOT0h6ti1Svgj5NhjuVyQPgGCjm2X0hcx7M1kRooc4DKgqUA2AuFBx3fnH8AwW4oHC0GH+3L9MPbQCQf2TPuZTjaH4+bo9y+oEPGxL9IFfbfYkSzHAPk61ylpwjE4wKyA1qmgtMS6QQLWHPpkMRHYZTpdFCH61HFGtTIrRCc6KRuj30nxUBCMOOwggIr9bgFy/iizK+cAm/VAOXIklse+9LnYfY9m5f0XTvOnueTgCIvzM9MZCzvDVYu64bu9CRCx3brjqoeDokgUJH8jwTKfoEd3emyyzq/2glwTUEZ8DP8AVcRf5dgafIVSthCwp0tHeEojDHRXQJfU7X1YvgdY3g5QZ6cnhpZn/AMhdEigqdGRClC7oCqqHAaIAYNrITG6pOLWguHAm9sa4We0NvdANV1WdjiPTC83TuIWTuaYynHgfcdA+1JewiQCzqxW0bu7vEwj/M0IinwRkTnIPu3PsFfeeIFu4ePbpNHFi5Qdk/S/FhFCSvBTrQmuaUyJS8Jc8JFaXYgdrxKOiFF/B4uE2q/ueVI7rPld8ykZxQQWNOCMVqtyP5KmUV0w008gZRM18weD0Rhy865yaANFUl8m6WjsuY0hgTKbXQ00qBl16S195pf0QeDCCIR+eEeMWP421XpZaC+eZCZJgOCp/C6Ndg1Ccv6GU9Ooe+cbSFuxMSGC5CQ6awjXnnQZr99YDpJtEo17b6ScLmDz5g3+srHkZm6TgQWX5HiRfY3yJDRTCIBYg47TQ3EguI536ZvstWkibUTqdDOh28yXA/rXTQWwwWY0Uhj6GeaEHmKuxAUC8ehqKsxkeh2AeEgGiwWcE2gGAboOcEjmscwUumaSUSSa34wOusF7ELa7zgtAz3Eq8yr71eb3mJxRXZXiO8iEdB7xAOrvFq8ELFtgBOj9h9A2RmQvMxZC8X7WKJUKJJLHRs5YNnVN+bw2mwVVE5gqeXj9DpX4WvvH3n+yNj8nJG/QZ1dZVHfm3u67iSu9H/o4mz+7XtE9lr3Jvbdr81YuDIvunyouMfVuDgrHnJb+Ym75vQPe1JgMAiQpME2R/4g
GAwUKMtfbWiT8+rG16i0GSJiTelgngLhgXJdNQ9YHkGH0Vr6nz8lGBEwsWThZs7+Z+p67Q67/TFuukL+xWFBE/OWVgM/7mJL/fPXi37O17q1oPIn/pXqp/IwJ0zu5dvpTzUj/hQf4p91JiJYsfrtbKdZ0SWuhGqaWbNl47lZtcYt9XsR7Q4IgYJjeapCp5GttOHzr2AJNzwdk1DQ01lnYguzsh/trj4jQnZ8rYLMO5G2HUY/+Nb8tD5J7aEbT9G+S2H0FbgacuI5qslp57XMbyF+N/R1mhgQUdaSBWpROetTo9c8c9zLp0csspad8Y/bkPBiUt1Ty/oPSk09Kke82eiZlCAqd27oJx/fl3eKxuG3thi75IKv03J+uxltleGEtreEbOBH8E9T4O73nV7BAEdZeygWHtZEPGuS4LKSMkHZ1u7BNV0LmSXQgEhNzCTBJTJoqM8wQKmAuEQs4Xmn/pexTXQ+8x31xx5SF41b9TqzD6pp/YPm94MwTcmmGDMjTY3YCLEf18ukxY/3yFmb0IPYV/ZZClgXCmAIAoAdF6OAWYwABCWeJDuRnJhdH0qSmjIJwC9ubggrebyI0KSVbDRzapJptHE5dkXXqi0hT0RE+DbMSg7+8IFYXnFwgNHPT0Oi/KwAQsr6udSGg/APUU3xr/RYAxwRc2F4HpyofdwXgSSi0CKp54PAwby4oU8RZsm2CVRiSCw7A2LuzXFOgN+OFmw0ep/CuOb2f/uEZeyvvfSudZVw078UDdrQZ9JltBJPRfMIVyEYFpOnzX3jn/2U0z4B8Fh02ZMycwi3LT5QGYqPJ+c9flLAAJilot6sg+MVD+rvgO/CzihojXInKuh50RKgiIQw3zY9lR82KkJO/Nf/6hu7Nju08Lr6oQ3ew0494OjCG1eVJwcV/8rmZ7x9ToA4BJywXI2Gq2nd/VxkMEmqbVesraew1m2uISWLYqdoftXAKAGG+4J15Lf9SZPmcFJI43RQ5aP2xlEDvmoczRX56C2taxZHx+WMFn77outO4c08+lkSut+k858b8WBSjf3o5Ju4DBxDkMDQLAYADGF4KGn/K5OzFVO6h8d63FDSqznvw/zwCtFtbWF0Ae2wjuJbXEVnsORsn/9UriHpBTszLZR6c3Hx3ybjo8RkrJ1YvkvIM8geyMcjNY8h15r53Kblhej/DZRLsLIRRgz4vk9E0xtHTPjKLMLX/nyPAbzveL3TZi4LaLT85P/daRuxIg+T/mjuoL8HuNakeVY03vAyJHDxl7+0TEdrVk5dUB3bz8PRxZas2zGY3H1V8XOynMtBED0FPvQvcA9F/covAK7n5yjFyIXDlRR5xHNbRa/v/CVI3WF47pPbU1w25WT98k5xxD04txx6Yn1NQwZRT/FEVx8QBhIcsFGTR5TDerHW7bBfD1eIpnfTJ15HWHaSFrPaCZsm0jj+ZEEIx1RQ0uX/3xt6bJlS3/5ddnSurTUJSXpGRnpi0vS01DkrZ07d+6oNd3eQXzEuj1jRo8es8e0c0xhYeEOhuMiPJLiqNWhbIk5TuCkhwdvrPxP7RPK1+Ym7ZO4S8dz11rrPvGP21jw8eXaBfN7TQwJmdhn/jz4zw18qUuGo046/0yvvrgSO178IrMzNj+W+u/NjL54pFDvxL3/o+S7qvI9XLj4kYir0pyg/hDln7/OGnSsrtMzg5ny7zEuNHR890bl3+fJJXcjkJyaRpX/weQkeCch9auXnXsPvUPw9gbdAC82VEWkd42p6g022CjAKkbAKTSA6g71itCIdMpo5y5DO8d3HxFYd8nQdvEAvwiDMEJMSXQYxM67c/J1EoDUThfOkvkjQZnGItW7xm8EFr+pGCpMEIjZPVNYTl6U6qGKF5sdbEbu6ZsFkRf7oGbEWTA1g9NYcIenqJmL9dhCq+1DQ4kTIoQaQ1Fe09EfZ12Ha/SHJYETrYxp0JWRS46euHr4+DUS+hk7dEju4GVnjt069sVtGf0gLsrNHwsjknoEtd1a+syHlevkrJHZjz2WFRi1femGg9+ulvMHPaHICnPDdbRAygRm0E/jU1M6qIUsetcINl/YRG1cN+6BaXWTL5V4PtRMUfjFrLgcVKv5wDePHu3cwTfCJzB4UPvl2154QcrE/1Q4Xs16TCfbfYy7X0aDKqBOwW8ekR8eYmcmy3iGVrU37zloTa6m9Hq4ExGrEzGqaYVQ666xb1bV5uYNmRVa9+WeQXmXfkMrHLPWFqenCM3uHQcQhAAg/EnwcAddeCnGMS/v4iESE0etEalOtqIslINICfNI5IwrKdEZK7zTXDZ+cw8v+gIvvAcnDxmCztw73ijHwwGQqsmFASzmrAiNNqUXTdsBD5j5Is07sMBWhiedOQvSvINEyw6IL27vRWtW8nRFOsLTQbp2OppBJ7ds0FkqxxAWInU0nW40G61ikvzKNfztiasI/nQCf3vtDfn7cpgEBXjvOPrRw8PRUuzs8IDobwCBBQDhJnkOT1DM8RgnXR8VT3LXeTir9kC1PZy65WPp4EuHAWSgnwjVdCSRpmgZ5h3sIQ+TJ8rMTzdSM0IQ6IjEj6EZvw7z8Y3PPsO/wXzy3hedgE87rjku0speFIbMCu0NuKdQT3A2gWGcVNVUOel5VtNwAhWxRkrug0pIkSz8KEjQdON5kfIBwU7W2GGJNN74i798E3rgjOhdZa26hbTw6qDvkh3QBs+C7tD+FLp9L3TaPr0biTgMSx4lxgBIdBYQqihv8nvkPxKbKiWFSetRqOOa0OPo0b3om6odCn2S8Da0Xk4FrUBbQMtjQCxNiWa70doHMnC1gmadmyKjnVH4eJaHZzLBpInSo4LKF0aMGjXihcoOo/oNGjx4UL9ReFviH6+dHj/dPn3i6ddqEldbXp5/evz+mNj9Y0/Pf9lC8XgT18KBD611htTiG/jSS7hWfl/BuwXBe4YG71axNj+Ctx/FmwxaWW3Xmf0Y3uYEBV+GPlspiq/VFKqg36IgZ2he3tCcgg5HX8wfMyb/xaPfUTwn7GsXvX8SxXN1Ys1rpyeShxh/+rU/EhU8ZsAl4gUhFgSARGAzECSaqly2GfjqJxb7JTdtAXRHKva7oocjFffQaU1csC0bvD4ncUj7lAGvvr5i0Na+CYNikweh37d+mdm9fbtxT/ht+SSra4eooh6Kv1KGV8JSsTPzV6IYFVUxpqc6EFC7nBb1y5oKa01zVSn1UvBKoQrC60puxFNokCJAGJio8cU4ueUaM/GkG5iObmz0uO+xEG2ivTBV0zGQjuUtm4isKF0/LLjCuoL4+MqTQ+deQsIH6z/+6PTpjz7ecVBAlxoDLNLiMy2v/xoMIz8Pq4ZtQq583/KbLVJjoAUS7QjEiSTfEwoKwH0R4JpG0O4m8ih2i8SqZC2x2gwVLZGw0AIbe4CvhX7s62otmglX0S1oJYwXSSgcyRsDZrIvf5FiotBX9REesbHSczvdf608+5OIrhcNHDTKHS5DQ4r7b+t89KhXef7cyt/P3jxnlycULpn5e6Wy3nkNP0vZ4i1WsdoeECXPB1Uj+QLUmAe1Z6QuUik9
TYxMdNpbiWa6jZVEoi+xGZvHxxGTF4mpvQ+NKXyn5+I1Kzpak+LXrVnbw1Yw0t5z/dpN1iRr7Kq19bNrXnu1pubV12ompXbJTF267tleB0YVHsreuG59Ykpq0qb1W/v8e0xBec8169G8QxhDdOgdCBqUPRQIgPg+2ft+YKqyJn7kEfy4TGIzrUFJVYm3UYi2Az3d2OQ9DfWSwWZk7Gfk61bkaqYa6VjeTHPfw5k0sJiUf6SlTvkHLegpmAW98dPQF++Go/HuOrwTFpK/YDwNGoQOaJEjofLpyps3yYBOsbV4hsivIqW/ka4F4KuM7FDZezDWLsmAvpNiK7ylYAnRsnCy/ajF+8zPP/+Ma4UW9T8LH6O/AAK5uLW4mvCqldjWs1hni+qb0t80u4c5c5Kp2tywOVWtjHexYe0dwpSuLK5Nyt4ysQO9G0Z788hYHt1kpTJXru5s1yMjTW6KvHkbzgLTyntzAgUXVw/tn9UV1/zyA/6UGLmvzp27evl7tT8P7p/VBRqv/g71JMe5ekHp0rlVt392fBLVJzwxfv7R+MdDElOegSfyVkZ1Wlnw1vFT52U4d/Lo3r2HJWW8++aw1e06rSp45dPLJ+XC5YW9Bw2K63KonUdAM9PAzkOHJxpMnn4DH+tboOyT58WfhDnOtWnFMjCwmppROrVc1VtHDH5E+YHsUon8CXNqa3HQrVviT2fOnKEZi8GkruEHqQq0JPomHsxQ+DSGLEVMI2tayYWV7juLeJ/HYkjht6hR15ZISmox1u4ZaVFaRu0GT5G8KzeKfIWeqFkgkXaTskI9ZvO6+BTO6vtwpV2H9e4ISvKfjeIgJNp27ztyZN/uchFtGjYsv7Awf9hQhzcc/OdtOBi/cvsv/OpcuAe2gZFwDy7A5/G3eBQaIG/d/eVbs974eu9mOX/gymmzn342Z+QyfAdvhROgG9TBcXg7yVknQxvui4/hKtwH2mkfAqoQfFiNWTR4i1Zf30+dUJ4tkWnqhg4hZKCKCFSz9IemXlYvs4phfaz9sp4UZQXrY/WouCJdn61HJJdyRn9Bf0NfrxfzKjz1LfSImI/6gMZ0iforzMmMaFzfDPcPI6ojrkT8EUG+BSIMEWjaQeVamHaQXodECMWEvk1lVCKbzqigkW4egmVKn1mlrzz3bPJjXZ54Acqvrl6+W98Mr7BOav5Mj5zO6KgpNjA2de7EKbOtaZlxsV7yqNK1y/Fx65Co0s5hEzLaR8coteujwAxhlrAJRIDqvy4BHaiGXRsuAQhK4EzhqBAOJNCccm25IPBZQponO/qxY5mQBWdC8TX2W86+NCTTqlwgqnzrCcygE0gGa/jMNl9j4i1y/q5Jw4MB3ibW8BtbUR1wJYDk3FqYvFlzEVmlFiTdZg1oQS+tseX+mm+F+luVNmFbdDWpvKZNSJ1FbVhCw6dGDf8qpR9+TZV+RDZ2JQ12Zdm5WoaGh7fCgK1vpianJeo8drqLWb32lHXN71NQis7xPAtTXHj6DfyW0H9ZSfKw4KCneia1zTQZTP2iErp3XZ6a+ERnpq9WSM2FfCZPDLSLievSpGuS72iLvpGa76Gyp0SwoVXSMUb/ni60d1flz1l3wugfuJ91RySF6U52ByBD08vBtwwrkQRNF1HJzqJJ27dPKtq56sk4a/fu1rgnxXcm7907efKOHZPjuz+ekNCjB5OJIxquCXWSB8HLG3SluoWL4hHF0WQXpV3ycle0l82LU6Z8eyUkI9pFl+IbvAOO/QaG1x8RsoSVJ/AMuOoEXHT3chWl41NoJ/pKOgECwRjXrgKVMm8B2ssAYLGS1Z1C34XQevFAzV5H1do2A/SQTj6CFWyqy4CkjtBXjv2wY0Yba0JqxttIfn39qp0FsxcjmI92rocg4fG27ZJSOsjj1pfO6DdzwmQZQDAKlaHrJCcdBT7URBoJ7uUy0liItFCCjoHqA10OJE/wViD1UwLJAwXTyyl0KKNDOh1q6AfZdGhQgOkzk2+Uh2qkZFQosyiiyP6LgsUHY6PSo7KjBPKVKMJK3lHBUURmXo6qiSIC8gNyq7ytZlv6to2i3w00KAHtTk0QRY1SaRsB4+H+zNTMtPh0SqPSza93T328Z8XmFYdk9Ha31Ixe3bvNE5+O7xAZ3y5UHjV71uTE4QH+I7pOnT9nqhxtjYtJSlyi2HuzST7/cWc+n+rCdJHab3RooEO2SLP5IqULeVdBE/VE3rxFPxpBB286XCYf2cD9fD6gpQACaxQw05Q+9EK45oh0XMb1bM4NJDYczOIAOeAh4XMuDuDhEizjC328XZtzNEEopkJYjBguHVMweErLusu6mFk9U0dH1JJQyqaXZqemCM3vHR8Un9AiCKdJ5xWapAEgTGU1ia01cdQHGhUQUFxwstVCAW2vsvigBTnXsAMK1+DjyA0Kn52F0t2+7Df3of5wg9BFkVNC7H1yKXYO3FBbi/r/ocxfhDPhSQLpDTowf9pNZdipLAwgcnHCZqLWl3AyS6RiGibCNM+MQa/u1qX17NY/REjw7N937Jxn28W0ay2tUuYajLbDLUQmSqAH3wf8P9j3XHewTeC82LD4cLjlwxKYjrajki1mJudmEXuknbMeNQOQFeREsL3Eg9ojdAghA033uB7p8D89p2HW4T17jhzevffIW0MG9h8yNGfAYHHmpvfe2zR986FDmweOGzdwes748TlMR08EW4VVAjE8wGd+AOjAZ3Aqu28DQLpMdHUkOA+Gom3k9XPoD4heAt+gdwEABo5aBB/lOzKQqhhsOHBr/C75zjkhmn6Hr2pk3ykm39klnWDfOcu+840wi3XNfQsMaCf9juposO8ABEbimcIXYmfWA9YDEEl9v/NL///p/JJZl5eye6xO+zaOdYPRQ03Q6yh9ct9h40f3m45+E+CfH35xfcO0pGDS+oV2r5ubm/1sTsGkXNb6dZi0fnUcPhjuvsZsKqUnSReKIkBr9mRZ0APmAndwwEsSxWjySCqMRYWZCT+CwymMwRWmuwpTBV6BQylMM1niYUarMMfB6/ApCuMtu/yOlwozESyHecCbzEVhaCzIi4hiLe5lKuwxmAEPUFiTRGFNylEwzLdp+AsA3WDJxnLJW7iqz0c1PwiiMxRkHyHAPJdOFrsnkJ2+CSCtMNpQpw3wLrTAl2vINGVgL6LueAodcslAO+gF8o/aB0b2By0k/Dy4fqE39ngHXyJ2wRXHXB/U2vGTL9p69yac00JS2rmO4fHHcAIchxZAoOwbnEr7nghdIgDdN3PhkYZ6cp/197C1bqOsNahqXGuZ0V+F6a7CVIESZR0NsguMlwozEQxvXCPZZY0avqC9HGzOdsqcDUuUOSUJNf7eGwCghTqLCjMTJCn85abCNJwjMHMZXgpMVUOagpebrMK8T2A2MrwUmIkNgQpeDIbWKUmN/ABaKzWzTN7Nf8QpC3ZBAk4WuExYoOKscFkgWjZdoL1PAlXFArUjhGABFZcjQSP9q12LdCSuL4haW4GN1S5q05bRonZtERvxyPbt91u3WmEHa966BAW0/lU0Q23hQutxR9bChfswmit
9D2yfdXTus98b95nOSSul/0CXSGA6Ofe9H5xGYYIkDx4mQYWZCT+BUylMsCtMrgpTRaT0ZArTSnaBma3CHAdfwMXsd1xhQlWYieANWEzXLoTC2EIMtpbOtYOgN/hauCEuB55ExgYQx8K/QoBG2lEismMPdGykUSsjhIkQmiHUQdgbpuCqTTAZpmzCVWzAx+BTsAvssgW/zwb8/haYiT+gcwgEn/2kP+N3EADCCRUH8B0HfPywPR/ADtWGjNqH0sBbcGh7+tJWeYlmN5XWDVbER+ND1LdjiWdqJEDiyJmhEum2EFMhEvppGjr6b0wftKk0bwztSih47cn+m5b0GVjfM8wiwzux07vtexdV+ptk7BOZH9/Y59G69YaLA26XKW0KJAp5acD3i/Dd7BWxUBjWpt1vB1OLomD9wRYtfjvE+IfVsbO1SHLyhlnZs0bJna2XCmNRYWbCT5U96+cK012FqSJ6dCiDkV1gvFSYieBNZc8yGJsfkZSqvGf10GzOFOec65Q5vSSFrwECmwjMQtaXZQLZfBU+Z5raIfBwRhrdPegOp64d5OpAbO6urpuPVWlfoQU7Rh+ntQ9X/FULvfGt2r/q6v5aQf6TbPjXusqqWvwleReOA1eNHb+G8e0z5Fl3ysEgEgzSSBxfrhrFtbVGLzUaB/4avgrxkZh7SZqqXZrrGt1dky8wcQVPccQMbvRf4Nzav069+t1M2PX8sf6vRHRsOy8tLx+/t3BE+vApYrcrd//9xrSzaV3xTysrKkKDjgW0yeneC5rWD/y8Z9+CTcuUtWB1v9IVshZdnbpkMQika9FODmBrocJcVmFmwiQQQGFiXWBkyQkjg6oUM4Vor1MgwH0YiwpzPC2K/coDMNJpFWaifwvKRR0oDD1eK6ZaO19vFadj4DMwjULGyxQy3mBLdsoZAcQ1XJeXin1Ae/AY6AJOc9XNmkO9Hl3qLLBSZ3s6CKYrlh5bUZJelk4rntOJ3shOH5GOpim3iitq0hvIC1GeTRc624PYiy2dO6GGapk2fLdtrOaSRKut1bTztDNfH/rwCB5LcPB1o5p4HmwsIRWvLj2Tlfz15opjt375NG9Q3qRrSK49Oem1pPSXx3x9wzFEEFevGrWw35OPnaqflrWh7ZmiucOFjPHTPRA8OM40NKfHqAM79rzeffi4YZnN5TWHumSkZ+G7P62Rl+xv3/6FmF6Hnux4ZFS3zGz0S9kMqdWEUrbG/XAqrU0ma/e4065JY3YNq6uVvif3n3Dy4hLQgnJIiFPfqTBXVJiZsLPCr2EuMLLMYBgvpvlTiFCdAgFUGOmMCjMxMIhyT2sKY2ttsFkUPmugzbeljB8/cto9Y4HE7B7VXgFlAKAC6ZQTRgYzW4hai4bZT4cJTJ70B4NR7B4LQAxKp9o9+wnMTOmgCjMRO4AMvBmMq92TQvi/j3QTWAhX7wSkxJivPAgOIiaNV5BOqc637/Uil4AOJq8ges8Um2EONsWa0k3ZphGmKaYSU5lpr+kt0wcmT+IaBpkoTEis3dcUwvReiIm+AF/K+zQS1lbD1AavtvRDczBLGepcm9r8CAv6Aqf3TjUjCTpLkYnxEVSi0fwbDceQK2fh/uJRk/CX3/+IL0GfSwO3xon6/hn4dp/vLL0jew7Y1uVsH9x8wfaw9eMWbtwq6SfgG/86ewcfhwHVP0BzepyUvztlS9E82aeVvsqY1X560b3U6n1LO2RUPDvnTbpOrL6QyZ9+ivwZyuSPWSeq66TU/TH+6u/kwT0Kf7WWFSgV5rIKMxMOVORhpAuMLDEYxoNDmTyMeGAu2aLCHB/O8Il8EJ/TKszEeCYP21AYWxuDLZxxhEDwfFVMFA+ynI8nSOXPaFOsVLGaNeOowQRAT5aiXs9U2vvvxgd1w6k1S/7ExHq9cBsvpqly9PiXH1y8d/simY/gNZPUHh7m7Cq+1oQZWa52lcDbVa14u4pdqXaVkTCMakpRHlKNLOtD7Koc6H41fnTME+vGDx+F//6lw7CoJ9aNHT2+rmUrGUb4x7cqWQDrA/1lfNm3fUBJCYqshfFGnw1f9LhWZrqNP/FutuFs9z+29FnUBqIhnl4nd3ad2RY67G5uJ/Yoa8FquthaDHHyxm5FFphkN7ZiKswpFWYmHACYNPB3hfmDwTDeGIIYhI5BaOc6qMJMjGOSgMHY/Gk9gfJbrN6HzZfrnM9fmS9QNjXaUitJLDDtv+tj+U/ViTbdx5Km1InWdVozvOkyUd07jje6dOfrRNXnY3TIVehwl9EhUEeejgZ0zYz/IZXBrBaEr6XWN11LXUpLxBU5WthwXdeDnYMVTmxOEgvlDxhRQ6KPbjD35jxE+wgj9SppROAseUfz8768ojfzRcP+XEUJX0Nssaj9zdSxUE/ckNRiVpqq0/WoX5y7OAvXEx8oEwrd1mYLs+lJHPRUjnsF1sKO8YUd9x6o8PCEPaEH7ADdYS+9eyUurMRWX6LykmS3Tyrxp1WfAra3CU0QsZdCQQdiMc3WnJb1yMYQ/ribBGCk+iCBGEoJZQkoj3tmwB8aF1FNlUqM5k7HatW4UVpgmjZoIBeSVG0aadjiM5mZJxb9iv8mEmHxycyMD6fxLTL3xs0vLSkpWVyyQLjT2C0zetjwUTCuzkSkQuHw4YXaphkUuff4CVJ7ffLkTjhG7Z/ZSfLsKcS3dAOhLMuO+Cz7QW9dsC5WJ+Qpx3GSbIOORGytQkpl2dqPoFuZWO+/alXgHwoflooDUIR0geXNOrL8lKCWDKcL2c7yXe/7kWAiAhovms6OUeKVzhs6eM6cwUPnTU6OjkpKiopOlvwGFBcPGFhUNDC6c1JMTDKEyUpPgfi10E/6GxhBAmAlU9qZ3KtpqMtLe8ugXngprh1kk6s1XQwHod/sYd1fsEYmLJk1LOlAXESSVD1i+dDMmLD8VUMz2jM59xIqEn8WOhJL8KvzIMeaweJIqEhy3rOBsWMzKH5dhL/hcCLDJGDQ1GL6siZQo1UwhXV5blbKRfEALMQ73iPw3YQ7MF8Lz/Yqg4fKCaf59AvSIPwczK0CgM2B78Lh0Is/C5WIi+E7F6Zc9MVXoTv0IPhRXNDz5LcjwEkmc0/CJwEARpceDp3q7xJc0FsM/hSDPwX7MXjed/RQbbsuDWa0HYYCiXCDO8WEfRbO0JbYCAc8NzXla9iNjk/iT2HkT+fIGHsBKP4pbEBdhTvAi3CmXfAQol0j+c/MLhw7Z/bYwjmCJX/O7BG9R86YOYLmJ8FWZBUOApl8L4Bsa39ahRoG46EVpvz9Er4CQ15CEXgaXG6Ey+k8Awh8CxVeovBGaIJhRuEeDMFXXvr7b+EgnmvEc2EZXEfgY0CRME2KBAJ9KhDLjqJLjITmV+lhzUXsEGb2/OmogzCIyGQP0Ayk8/H8+31HdllydzbjeAoaycJYVSmq9XIelUkrnSKhVfCJFNCXpaVV2CrCMyer5NvC7G0221Q0w3EAPonw2/SZehK/4AqZOxqUgvsh/wfKsaIjSTlWbDQ7EI2zs/T8YQOAnupMYMhR53bvSHqcDhlskbyrZ6omd+jR5y
1cjWeLSa1CZ3KQGGTsLw5om+os9J+wC8ftWPbY1DjfpHlpN/F3G8h/MOxmyvQs34RpSUu3wzM4Dp6BJ9HUV318jnkbYIuPUOWiSv1x2NrgfcJgPFDcrHKRwj97UJHwvdDx4Wf9Ct/T/DYqqlLWyx8A0cz6CFuAyY/qJNS2HjWpPfzJhf9/oseQqvkjL7xw9ewTa3PD02Y/XjT2q6/QuLo60muYW/llcMuTphYFBbmk17DRDugNgBAuWAjPGUA3Dc81d00lIHeRsh2KLYfajLzBeVarnnGeN8950Gz1idShA8XFH+DRHvDFD/EY4bysh6Hr16+fjoKwLEET8mW0H9XwJ7outANRYIsmz95cSznFHnsw726PCmymSZE7s+FqplxJkudpE+aPzpTbHw+GeeStNg3/n82ew3OPzp4zmQTQV4QegaCPpmai+QNnHf+vqyMs/4fqiIfURgwGAG4hOEogRiPTmzd1zjOZnmuXVFO4LIGr5mQsak5mJpzXmKNT8jb/Bbts07oAAAB4AWNgZGAAYen931bF89t8ZZDkYACBIx8E9UD0OZEzun+E/l7lLOKoBHI5GZhAogBOMQvyeAFjYGRg4Ej6e5WBgdPoj9B/I44FQBFUcAcAiWcGPQB4AW2RUxidTQwG52Szv22ztm3btm3btm3btm3bvqvd03y1LuaZrPGGngCA+RkSkWEyhHR6jhTag4r+DBX8n6QKFSOdLKaNrOBb15rftSEZQrtIJGPILCkY6jIjNr+KMd/IZ+QxkhjtjAZGRqNsMCYRGSr/UFW/JbX2oq9Go427QIyP/yWbj8I3/h9G+5+o5tMxWscbE6xdmVp+DqMlJzO1Bclt3mgtwOiPxcbmGI2o7KObO5lzmD+huI7lb9+ATv4Hvv74B6KY4+kdvtQ1FJG4dHCF+dH8hatOQjcCJwPszsXs7l1oo/HJa86vKSgqu4lmdQGjpXxPH/k1PEfj0DaoP7ptc7vQKphrtAksG81RySdb+NnazfUr/vEPiGj+1/jGKCizSSLCLPPvPi8Nn/39X/TWlnbvheT1IympZ/gt9Igueo8S+hcTPspAYdeXBu4c5bQmrYO/f9Z3nM7uM1prdkq7stRw5Sknc2miy+mn35BK0jFGvqGmJLS5k2ls66t99AVzPqpkHKWehigT/PuH+Lhj+E6QRZDDSyRneH+Qg/moscqXIcLLDN5FM5DTN7facniTZzlsY4Bepkvw5x/io7UkeJaDZfAm8lt4kfxGb/MKY6wuI8UbGbxNX9JrV7Pl8BZBDoPpFjjY6+MFVPw4OfndJYbLPNq5I7TxnZn8UVtmhEaSzsgYWK4ZN8gox83b6SL1qCFVKeBGENNNJbXmJLu2Z5RO4RfXnZyuEuVcQZsTn8LB3z0FW2/CPAAAAAAAAAAAAAAALABaANQBSgHaAo4CqgLUAv4DLgNUA2gDgAOaA7IEAgQuBIQFAgVKBbAGGgZQBsgHMAdAB1AHgAeuB94IOgjuCTgJpgn8Cj4KhgrCCygLggueC9QMHgxCDKYM9A1GDYwN6A5MDrIO3g8aD1IPuhAGEEQQfhCkELwQ4BECER4RWBHiEkASkBLuE1IToBQUFFoUhhTKFRIVLhWaFeAWMhaQFuwXLBewGAAYRBh+GOIZPBmSGcwaEBooGmwashqyGtobRBuqHA4ccByaHT4dYB30Ho4emh60HrwfZh98H8ggCiBoIQYhQCGQIboh0CIGIjwihiKSIqwixiLgIzgjSiNcI24jgCOWI6wkIiQuJEAkUiRoJHokjCSeJLQlIiU0JUYlWCVqJXwlkiXEJkImVCZmJngmjiagJu4nVCdmJ3gniiecJ7AnxiiOKJoorCi+KNAo5Cj2KQgpGikwKcop3CnuKgAqEiokKjgqcCrqKvwrDisgKzQrRiukK7gr1CxeLPItGC1YLZQtni2oLcAt2i3uLgYuHi4+Llouci6KLp4u3C9eL3Yv2DAcMKQw9jEcMS4AAAABAAAA3ACXABYAXwAFAAEAAAAAAA4AAAIAAeYAAwABeAF9zANyI2AYBuBnt+YBMsqwjkfpsLY9qmL7Bj1Hb1pbP7+X6HOmy7/uAf8EeJn/GxV4mbvEjL/M3R88Pabfsr0Cbl7mUQdu7am4VNFUEbQp5VpOS8melIyWogt1yyoqMopSkn+kkmIiouKOpNQ15FSUBUWFREWe1ISoWcE378e+mU99WU1NVUlhYZ2nHXKh6sKVrJSQirqMsKKcKyllDSkNYRtWzVu0Zd+iGTEhkXtU0y0IeAFswQOWQgEAAMDZv7Zt27ZtZddTZ+4udYFmBEC5qKCaEjWBQK069Ro0atKsRas27Tp06tKtR68+/QYMGjJsxKgx4yZMmjJtxqw58xYsWrJsxao16zZs2rJtx649+w4cOnLsxKkz5y5cunLtxq079x48evLsxas37z58+vLtx68//0LCIqJi4hKSUtIyshWC4GErEAAAAOAs/3NtI+tluy7Ztm3zZZ6z69yMBuVixBqU50icNMkK1ap48kySXdGy3biVKl+CcYeuFalz786DMo1mTWvy2hsZ3po3Y86yBYuWHHtvzYpVzT64kmnTug0fnTqX6LNPvvjmq+9K/PDLT7/98c9f/wU4EShYkBBhQvUoFSFcpChnLvTZ0qLVtgM72rTr0m1Ch06T4g0ZNvDk+ZMXLo08efk4RnZGDkZOhlQWv1AfH/bSvEwDA0cXEG1kYG7C4lpalM+Rll9apFdcWsBZklGUmgpisZeU54Pp/DwwHwBPQXTqAHgBLc4lXMVQFIDxe5+/Ke4uCXd3KLhLWsWdhvWynugFl7ieRu+dnsb5flD+V44+W03Pqkm96nSsSX3pwfbG8hyVafqKLY53NhRyi8/1/P8l1md6//6SRzsznWXcUiuTXQ3F3NJTfU3V3NRrJp2WrjUzN3sl06/thr54PYV7+IYaQ1++jlly8+AO2iz5W4IT8OEJIqi29NXrGHhwB65DLfxAtSN5HvgQQgRjjiSfQJDDoBz5e4AA3BwJtOVAHgtBBGGeRNsK5DYGd8IvM61XFAA=) format('woff'), +} + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: 200; + src: + local('Roboto Light'), + 
url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEScABMAAAAAdFQAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABGRlRNAAABqAAAABwAAAAcXzC5yUdERUYAAAHEAAAAHgAAACAAzgAER1BPUwAAAeQAAAVxAAANIkezYOlHU1VCAAAHWAAAACwAAAAwuP+4/k9TLzIAAAeEAAAAVgAAAGC3ouDrY21hcAAAB9wAAAG+AAACioYHy/VjdnQgAAAJnAAAADQAAAA0CnAOGGZwZ20AAAnQAAABsQAAAmVTtC+nZ2FzcAAAC4QAAAAIAAAACAAAABBnbHlmAAALjAAAMaIAAFTUMXgLR2hlYWQAAD0wAAAAMQAAADYBsFYkaGhlYQAAPWQAAAAfAAAAJA7cBhlobXR4AAA9hAAAAeEAAAKEbjk+b2xvY2EAAD9oAAABNgAAAUQwY0cibWF4cAAAQKAAAAAgAAAAIAG+AZluYW1lAABAwAAAAZAAAANoT6qDDHBvc3QAAEJQAAABjAAAAktoPRGfcHJlcAAAQ9wAAAC2AAABI0qzIoZ3ZWJmAABElAAAAAYAAAAGVU1R3QAAAAEAAAAAzD2izwAAAADE8BEuAAAAAM4DBct42mNgZGBg4ANiCQYQYGJgBMIFQMwC5jEAAAsqANMAAHjapZZ5bNRFFMff79dtd7u03UNsORWwKYhWGwFLsRBiGuSKkdIDsBg0kRCVGq6GcpSEFINKghzlMDFBVBITNRpDJEGCBlBBRSEQIQYJyLHd/pA78a99fn6zy3ZbykJxXr7zm3nz5s2b7xy/EUtE/FIiY8SuGDe5SvLeeHlhvfQRD3pRFbc9tWy9/ur8evG5JQOP2Hxt8ds7xLJrjO1AmYxUyiyZLQtlpayRmOWx/FbQGmSVWM9aVdZs6z1rk/WZFbU9dtgutIeCsVivND1dsWSG9JAMKZOeMkrCUi756MI6AN0g3Se1ellm6GlqOXpBxuoNmYXGlgn6D/qo9JOA5ksIFOoBKY79K6V4qtC/ZJy2yXNgPJgIKkEVqMbPNHpO14jUgXr6LcK+gbbFoBEsoX0pWE55Bd8W/G8BW9WNboZ+b/KPyWslDy5K9biU6TkZpY6U6ymiLdUv0Vyi9jvt1boT+x9lTmyXzNUhaHKIcqyEaDkLfw8YTQBNDpo2NHmsVjZtrl2u/kZLmDlHaT0BJ1HTZ45+gbdfTSznJVOK4WQkWAAWgiYQQB/EVzAxYhheIvASgZcIvETgJGK8NfDdgN1GsAlsBllYO1g7WDtYO1g7WDrMcAK+a2UA6xci+kp0i0EjWA4s2nMZO6DNrE4zDDbDYDMMNptIHSJ1iNQhUodI3R4DafGzG8JSKEUyRB6VJ+RJGSbDZQSrWsb+KJfR7OAJ8rxUM/Z0xq6Tl6Re3iTyjUS9WezsQ+7e9L7j24G//uznFl2th/WAOrqPNelG0hq5z6Srk6Ub4Kau0Mv6qe7W7ZQPsxIhPcgeX3sPns6DCDjYSX/9rj3/7ka8bbeNGQXHE/UzyZb3Naqtt/W+FAepZ1J3mVOWPoW7ipYzFE8hSiE3Erfcabyo/I+kF7TVzPBMiq6VU3Wr/FGy9F2y1MD5aLfeG7ukh3SKztOQHtOldxmvgTW/3uWKBeLrqifdSuxbPeNypiOTPb/StfqBbgBrYCOIKkifoH6ou3S//oxFky4jLzLWvTSoV/RrU96pR/UY36Mdx9VzerNDbA+b/M8UzXE97TKTYCcvdY079Fxl8v2duY3vJb3Y3lvbjK+QWdMjScujKb226ze6V0+AH9gHId3G3ghxPk5yZs+m2BVzo4j+otuYZ3wX5ibGa4uP3R5tYufcaU32pGm7er+ninU2ffVaVz47Mt+tHXstTVvae0Cv3PeYTjqG4n5v927ukWDyTnDucuZXdXEerpqzcsc10D9M3nKnmNPFnZ6n7nOlY/RxrdBhYDA7yovKyx/Mq5N0vr6l67EIaA4ne4k5369QP6Kvpd4r8RRjZ+hP4PPkPrp4i832qOJ/AP1E1+ke7uE9nPDWJJ+Jrx4Cu92zEZtr6m93h6H2O7CDtjENA6eSpZOdzwL/84C8m3g93kuyeVN44C/L1LyIT7J5D3gNqz0SVjloc7lZuAc7/RfC3NHu/+dBU8tP6vORAnN/90poeoM+5H3vIaYsM3omo/oYwfVdgLgpk6+vWxvGSuQWfkuMV4v5+Q1TAaIMIr2ZVYhyIWLzCipijKGIT4qRPvIU4uNFNJz8aaQvL6NSeBqJ+HkjlcHUKCRHnkEKeDGVw9dopJdUIBkyTsbD80TEIy/IFKKoRLJkKpIpVYhHahCvTEPyeGVNJ7oXkX68tuooz0SCvLrqiXCezCeSBbz//bIIyZAGxCOLpRGfS2QpHpYhPlmOZEkT4pcVSJ6sk/XM1325WdKC5JsXnCVbZCtlG75djiSFI9uwkwE37hv6Md6G2cx+NJYVzKs3MxtPlJOQ/sxtqjzEO7FaBpk5PMIMZtKznvgGm/hKiKsJPjcw3oj/AIgWgIQAAAB42mNgZGBg4GLQYdBjYHJx8wlh4MtJLMljkGBgAYoz/P8PJBAsIAAAnsoHa3jaY2BmvsGow8DKwMI6i9WYgYFRHkIzX2RIY2JgYABhCHjAwPQ/gEEhGshUAPHd8/PTgRTvAwa2tH9pDAwcSUzBCgyM8/0ZGRhYrFg3gNUxAQCExA4aAAB42mNgYGBmgGAZBkYgycDYAuQxgvksjBlAOozBgYGVQQzI4mWoY1jAsJhhKcNKhtUM6xi2MOxg2M1wkOEkw1mGywzXGG4x3GF4yPCS4S3DZ4ZvDL8Y/jAGMhYyHWO6xXRHgUtBREFKQU5BTUFfwUohXmGNotIDhv//QTYCzVUAmrsIaO4KoLlriTA3gLEAai6DgoCChIIM2FxLJHMZ/3/9//j/of8H/x/4v+//3v97/m//v+X/pv9r/y/7v/j/vP9z/s/8P+P/lP+9/7v+t/5v/t/wv/6/zn++v7v+Lv+77EHzg7oH1Q+qHhQ/yH6Q9MDu/qf7tQoLIOFDC8DIxgA3nJEJSDChKwBGEQsrGzsHJxc3Dy8fv4CgkLCIqJi4hKSUtIysnLyCopKyiqqauoamlraOrp6+gaGRsYmpmbmFpZW1ja2dvYOjk7OLq5u7h6eXt4+vn39AYFBwSGhYeERkVHRMbFx8QiLIlnyGopJSiIVlQFwOYlQwMFQyVDEwVDMwJKeABLLS52enQZ2ViumVjNyZSWDGxEnTpk+eAmbOmz0HRE2dASTyGBgKgFQhEBcDcUMTkGjMARIAqVuf0QAAAAAEOgWvAGYAqABiAGUAZwBoAGkAagBrAHUApABcAHgAZQBsAHIAeAB8AHAAegBaAEQFEXjaXVG7TltBEN0NDwOBxNggOdoUs5mQxnuhBQnE1Y1iZDuF5QhpN3KRi3EBH0CBRA3arxmgoaRImwYhF0h8Qj4hEjNriKI0Ozuzc86ZM0vKkap36WvPU+ckkMLdBs02/U5ItbMA96Tr642MtIMHWmxm9
Mp1+/4LBpvRlDtqAOU9bykPGU07gVq0p/7R/AqG+/wf8zsYtDTT9NQ6CekhBOabcUuD7xnNussP+oLV4WIwMKSYpuIuP6ZS/rc052rLsLWR0byDMxH5yTRAU2ttBJr+1CHV83EUS5DLprE2mJiy/iQTwYXJdFVTtcz42sFdsrPoYIMqzYEH2MNWeQweDg8mFNK3JMosDRH2YqvECBGTHAo55dzJ/qRA+UgSxrxJSjvjhrUGxpHXwKA2T7P/PJtNbW8dwvhZHMF3vxlLOvjIhtoYEWI7YimACURCRlX5hhrPvSwG5FL7z0CUgOXxj3+dCLTu2EQ8l7V1DjFWCHp+29zyy4q7VrnOi0J3b6pqqNIpzftezr7HA54eC8NBY8Gbz/v+SoH6PCyuNGgOBEN6N3r/orXqiKu8Fz6yJ9O/sVoAAAAAAQAB//8AD3jarXwHfBRl+v/7TtuWLbMlm54smwIJJLBLCKGJCOqJgIp6NBEiiUgNiCb0IgiIFU9FkKCABKXNbAIqcoAUC3Y9I6ioh5yaE8RT9CeQHf7P885sCgS4/+/zE7OZzO7O+z79+5QZwpG+hHBjxNsIT0wkX6WkoEfEJCScDKmS+FWPCM/BIVF5PC3i6YhJSmzoEaF4PiwH5KyAHOjLZWiZdIU2Vrzt7Ka+wvsELkmqCKHtRYVdt4BE4FyeSoX6iMiRPKqYCxShTiEh1eSsV7iQaqF5RBWp7FaE4o6dwoVhHy+H5apHH6iorqZf85805OM15wrd6edSAhGJjfSCa1KSp0jhWk4gFiFPMYeoEleg0DpVcNXXii6SBCcFl2qieaoVztjYGdUOS3XslExxjbAHX+fyZYFqoTQgdCfnvz6snaPcl/AK611DiLAGaEgm6fRmEkkCGiK++MRwOBwxARkRsy0OjmsJTTLZ82o4OSU10x9WiaO+xutPSM70h2pFgb3Fu9LS8S1RrK+RLFY7vEWVjAIlqU5NdNUrifomza76iMlszavpbRIsQI9LjYezPjjri8ezPg+c9blUG5yNc9WrAZqndEna2etfp3OJL8+6s9e3p514oCS5argkkwfWZa8SvsIiNZZEMxzEu2qs8TYPXqrG7ouDD7jYq8xevfiKn/Gzz8C3Eti34JrJseukxK6Tip+pSYt9Mh3P871dHI9EumTkQkpqWnr+Bf8pvZNABJ7CgCcAP2Eef8K+IB/wBfigB3+K4K1rqGuwVk/bDRoziHaDl3/9z2ByXjs1YMwA7S14uY92G6y9SVfeQV8bRZ/X2M8o7bo7tDK6En/gPKggqTzfkY9Kj5AO5CkSyQMJKm1BDub6SJ6IPM3LteRFZBCm4g2rKZb6iJyCp2W3BbQ0v0Bx1KnpoKIko05WOXe9ku5SZWB7bkj1guDahhSvSzXDicSQmuWsV/3uerUAxCOngyrHFSteucYmprTJ9BcrZrcSLCZqiii7txPq8CdkwVngQlHYGx8OdSnsnJ2TTws7dykClUyjThrsnB1sI/m88f406vNKJl+wMJ9W8uWHHvvblsd3fPT225vLtu3l+PLnH//bs0ve+PCtj5TS7afoc5L63KqKSQ9f3WfnS2vfcxw65Pr+gLhi96r7py7r3e+V6g1vOXb/3fYxWNCk8z+JC8WDxI7aDdzpTh7S+aN2ctRHBOCImuCor+2amSfY89SucCjb2KHsqKdKjwKF1KkOYIHDpXp13UWFzYDDfDjMd6md4bAtaGlP+O11yO4am5ACRlCsds6HP1Iz89LgD6J27SS71ZT04mI1QYaj1LRiZArwIRyKT6VeKdgmu4gxqCfVGeKhfpp1mfcnrZ43d/Vzc+ZXjbprxNDRJcOG3VXLvXVDtJjOgTeqVsMbo0v0N0qE/gPmbt06d8CcLVvmDJk1a8iAIXPmDGmQhakdzz26euCcrVvnDIy9NXD4jJnDCHiz4ed/El4DvrUhHUlPUkEiKegVMpBx2VJ9xIqM684Di3oxFgVBeYK6eXeCw04utSsc2kGT7C7VB4fxcr16FfxGPmy3ChnZHWRkks8OTHInprZjTOqeLbt3EJM9MbVDZ11rOne5ijJ1ATaAdjgp7QUeDdTEbwrmOGgjV4rgUzkmB/WAHhXBRxiPhj+x1HnzwMiqx18adtsa+lynLpP+0u81bumM2w7d9/Hpyk1rR2y7VisRTVzBtEEPXXW12q3TPSPLJtN7K98YYxvz4l+rNq+dOWzB1TO09OuUMfM+/+th8ZGBt9ZFZlVffw09JpqEzJEruEN9Hr1pYYeSroPGLgAbnCb0IceY387WvbbhsqkiXeCvkVGN3nmauSxb6EOt7+3XThK05Ye1TtxEaSiRiYdQxc0YbAWr87AveQpdpCidSpzsc7mBDdnkYRq/SUp64vDhJ5KkLdoJrqeTjud6l9C/3B39Vdvu1bZHfx1/7RiuM17brXWivza/Nl+n2puu3cUtF7q4nKJwPIHLE1PQ/fiRow8nSS/TeO3EZkmrKOPc9EYv/QvnK7u2JLpXe8qpPRx9bwzbdyo3m78B4oiD3EMgpIKzoQVUcbL9cyB7EczExZy5kp1EIQjnv0NUQvPfQfd+ovP+TPTqDoW4FMdeQaEuhdvLqZwjP58qDnSmVBU58Dc20BQeY6jE/IrIh/ksv+gx2WiOJzWD3iiMNdO+Aa3mm9vq3rvtiHBr6Uw6VVs2t/Re7YuraCft4560PWH77U+WC52EHRBlbyEKKVBMYZXa6hUxBMJD70is4DQpwUPKo6OEsGutY3EcdFwIRSxWfM9igo9ZLXhoJZZY5AW3D6EdXL0clPvTyHT6utZvOjetnH6i5ZdrafSYvofBmkadZBfoTBbuATXG2kxjQDJoUwKSKxY3qszgfhXj4Iv+6pe1E/p1OnHdOBe3Biy3DV5HpVI9/lBFKAAW59XyXtREwB7G3nyd6Ddct9JS/G41vHQk6+G77WIIxl7feICXQAny3nr2o18CsUv10vXr8ftp5x/g/s0wkEwAMiHwgVX1z/lpmKZxoyZEX5gtdTjzKcNMi8G3BA2f3I1EbLiQLMW8MTqVFN3vOpv8LjAi1fCwqk0oRlZ4ZJc7HHInUhcXbMN59PAi695x8ekjR/44feTw/1SqGzZsU6qrt3KFtB9NpCHtA+0H7XXte+0j2omavv799Dd0/Lf/+c+3QMeu82e4DWItyKI7iQjo7zjcEeVcGXsLEO8wsQjACidslkeBC9SiGzNoMxMRMjcLRL6L/rtSNN865Gw/sRvyaDJgLBloToKjiAMptgHFaCRqPF8fiWdXi09CLUvWAZPMABPYpSrBcpIHPyDZQdU8Eh56HLByCrzrSZTdEd5mLQamqDbgj+IsVuLliEQ8xSzIZBvO00T9oI6FNOYefcHJ4h+f7Dr2zGJtMsf93FBJjy6c+OzDGzZPFjw7Gg7vqPyfFVo3sXQEl/rUOyOWrH91JdIx9vxP/GmgIxe0JtIW6RCBDrEtbkkEZkRSkCQvkORlCMObYMmrtce1TYGQakfR5unuACID51L8iDcS4DihADEFnEKUgRBDyXIp6fiuDMdyAaKTiJzOMEscEN4e
wYcfYgegjrYsdsQB4FBJVnGxYpeVNgBJ3GpienFL5JEHxsMOGPU5jYxhyCPYJnMsV/7Gs6u27nhp2bI161eueLimnBP/3L3/h3nTliw+d3CP9jNdJC1TXnj62SfL1sxesvbFxdLLx+p23729fc5rc/Z9fQR1ux/IuT/YgpU4yRASscS0qJbYLJwdgDoAZ6lekQAYuwoUS50SF0LlVvhQxMxciFkCJloYPLagN5FRuWyoXLRY4WTFwVSMhmVAkqBnkJjkmPpxax44frwi+h2XKoVpeV++oSGrVHuclpfyvbiJzD9sBZszw77SyX4SSW2UW2qj3FwoN4+tvsaR6jLn1fptqS4Qmd9WzxC8s64myUkceSoHcRxFlOSMAXPmyx1O9OVOh+7Lr9p8ZjH6clFxuhTXXjBixbN351UP/tkVztpqvA6PJy8CrxkPZTwUlEBli4nizacRl8erw2aqmtHTpxYrSaABbtRsB8g3QsxJxRfIFERpyvEgpO5Fi7q4fV5wBtlbufHVy9a+8MITDz8ZGH0ztz+6rkvRwik7jx/9uvYXOl168rkDO9cdHDrMxadOjp4JdeH58+TwUe3PdwjzTyuAV+nMVnPIXSSSgNxKi/knG19f685MQIjoFoE5bZk+J6OrCinJLmSK6gPmtIPfgWTQUMHkTmAampkGGupzAgS0uYE4c7EiyIoJqZE7E9BEvykfAI2UCgYKbo0RQoqak7mCpn3cf3lxenH5wLWf9dg55cDx3w+8o52r3Pv08m0vV03fHuBS6OQG2qtNRklGWsP78weO1H498rn2I23f8PGv/3pxW92cu5guDAAdRV2II51JxIwaik5bJWie9gLFXIfpaixFg8CnOlAHiRk2zRfr0cNKeVOwyE08A/jXT5zNtVXacqn5C/GGsjLtx+gebemMGXQq91dqIoglxwA/7cBPPwlCjnw/ifiQo8nAUQuu2wE4mhPwWYCjObiFjoyjCcBRCR1AJhwkuNQ04KcbDnPxXBwwuBOcyM0ENGnhfckBJ2MxMlx1E3ACObLq5OF3B7caJxXrULKoGZJkNi+AzTfnsKfZ8ZiqRfcuPvn3Xf956N5FL2hnP/hEi1bse27FgbefXnGg3ZYli7aqCxdvpgvm72nXVrl/10cfv36/2rbdnnkHPv3kwGNr1z360JYtXMH8Vavmz6l+HnVqKPjNfxk6BejIGot5LAJkAQcS0qw8cCBBatIpbz0qFIQ/JRBSTV5dp5LRFdhZymV18LpmyVb9XAK6BzUL9Yz4dKIJi5BeAkaRU5RGWQKBuJkzcLNO7FByftenmnb6i4Grr4vvu2jwhgOFNZPe+m3W5uULtmVtX/XIK/zuozRXO6md1QZHtfq09DEZKV9/uHzEGOr9cuOxRSUrP/zytG47GCSCQldWD+nQhCYYIEAsYUbSADshlAAvyBCFpRFR8PCzculSwBX83xBbcARhTo7QDWKyhXQiEROgalXCC1ljAEkxh7D8IeH1CljR4AK0ZMOXcYCY0pbGMJOwAq+u28IMfgn/EVydgFf1UZPPT30D+O7RlRMmcGX099F0xhztlxQpRTs9B/fzFN3Af85vYvQl6UjLqlNnZdQZxKCNUPh5iu/TsJvvQzeMG0dXjRunrzkL1nxHX7OokBYV5lBYeRZXOWFCdAk/YMYs6k4GL+CcqT04mvH0ZjCi65nupJFJJJKMPE2xx9CDrSV6SNfRg5uhB4CiSnIIzaU2zUu6C3lKXCOkYElsXBLoCh8PhuKRVYsLHW18CjpaKe4C8OCgviB42Bh4MAWRqzfzdRtq3l00o1dyBc29Y8JdS+bcD1GHtlkmlLy4+9DmxR9PLRwx6oG7byt/Ztq8h5fed279ypVAzwytu/S5+DAJk2vIFhJxYrXCElaLxHolLaR0KlBzHfXK1QWqD35lFqg8Aq++zCRyIOfO0X2sBMlEP70ydNW+s1P11KGnS+m1FzzLGSVpL6lJSu7ZC+swtPGIhZYcsCCVtgWaA3Jvi4WXM3PzOxV2w+KF5FZNbZAJzlz4TId88NVXFwE7EhINdrhJIIPwEsYYI/3s4mauO8xLzJ70D3AkAMd++EQGofobPWiRh/n3GW76Ga2gi+lS2Vr3wcB75MLnyh5Y4vGf2Dhyaj+OD1lvKnr0RZtbU7Sntb9rI2QPnUhvHlLbK733B3dqC7VRXLHr1lG3P9KZFmQM7PigQr+mGzlJS9WGHNb2lQ0fNfqXgxoNFxZx0X0LR515iy6i27R22jxtkdahfbB/u470Nzp11au3T4UMlsvwJ/0M8oCsXvgG4oEJMqH2us0qfJgFhVrJTCi4JQlxQFwBy21UipHAigVMAPdBPsB7AkAo124KlzXr6Wjp07u5G7WvJVE5exN9WhvHUcg9WBzYA+ssZvmhH9Ycb3gHJ3hBFn8y0Av62XLMCwaYyJ3o/kMAJJje2pz1NaLNYwYDgPMpYHagyG0o/slCKlH9TpYioi+ECJuhY3JIxJojvayA7uUDhbGDPfSl76JzJy7aEP2HNo/Oe+HV6jXaRDqoasurivaBqOzZW74hI+HQwv2flK557IGNpcsWP7RMt+WFENs2g22mkrGGZXqAHk8yg+jxgKsYaIgDPBwn4Lk4CxppGiPNBSS4WPVTsYQYDDaF1HQslrhA+4TkYqRClRJRIeM8cMqUoFeNXODVBUj9UZ+4VOp1o4KF/RLEM7KQ5v72I3V5uPKEd17d88MPe1495C/nPNrP3/+m1XGjT9J4OvqPb6Tte7XDP5z6t3Zk1+vSl+fonehnUD7vg3wsxEM6GtKxxqTjwdDsjdUiFKsLUQHzIz7dfcug+FgzCAB3SU/amSBXq6mNjtDWa79DutXxMPVrP36ufSQq2nNa/evaj1pVKc3/Yfdxms94iesPhfVt5DpjdUtsdQF0Q9RVUeSZKuJGYmk4S9EtgFQUa0jPx40kXE/A9Z89/FMNx7i/R6/hg6JSFj1aFl1fShrXHcXo7q2ve/GaJj3itLamsaDtggX38C801HEHoj1wsbfujt6ur7Uc9OUD0JcMrKmlxfSlFSWpTUhMQ5DJ8uFAK/qCkNMUisQzVYuHNIvZga46aaA6yTKzhwRQHCW5WI2DNNFAmy3Uxyfr6iODMchMg5bTwj9+ohYfNzlp364Dp7T3n3g3S5tNz3XSogc17XVuCMjUQW/9aZe0fLt2/Gvtt+PaVzd3pLPKomevm0mHNfG0nsnyKsOjmHSPoojhWivPuGptkqSN9UcUm15lFljDpFGG2IAJQ64DTK3ge1RUNBwQleit3OazN3FV0RJ9PUi+6M2sBhFoJsPG2gVcDX/ExiseqUT/pH/3FsBmKnzXg3rnaMyNHI25kYVdCpTfHctcWQ5k05Vfz1UcwGsL5CiKu3l+AithZpmTXdj5Fq5843OLNlee3PV+xVS6TKpat32F4Dl38q2fxpXtNcd49jPzjzGeWZp4xtsZz3j0jM7G8ggXwooaUXm7nlFQPaNACsE5+y0U4nQQ2PYW13MxF93ALeIejT7/NrCvhKsSo8XRgMhtiQ421jbB2mIsAuBKBg+
lGA8jPNN6XrTEKphMOL49lRwY9dntTfYkdYRryeQ241qmuHAjJbGKJkvsdUaa9AKkKhPGSMUs13BinB0jskmv92F1JcLbHCwKM9ooaoQnhwapySPvWc35JS6xqsIqRb8bHD0u2WA7msiBhjzAzebOakIDjS6Jzm7SzVNMN6+9SDebKyRoo2Dszo7ixt1xLGszG1tSeUtsQ0WootQk76nku0ugowchAJ5Lo8I/z94kHKfnUsG/zgLb//7Cupc5VveyXLHuJdj0uhf4/5ivzSAeNF83+Fssgvlm0Y6UUIF20d7VGs4T7cPK+o8+O3nqHx/9iK4/kY7U1mo/nNS+19bTETTpZ+1bmn7q1AmaoX17QsfvyJu/sfqFh/Rp7g3B/9dabEwHLS1DgS2E0cCJBV4jGqgem9wy8AYDibQp1v7+r3Pn/qUtoHNqt9du1xaISv3efT9G13H7X1n28Gv6Pmadby86gFcesOebSURGXvljvEpDXrVhG/DCBrwuNcngVRBLE17Muh2yjbWjZEiMABXIumalyaBOzVjo5Ux+UxbDaZdg5MTSs4O1P7s/cP0lubleOzP4RP8zqakXs5Qju4CfH4nbALsHSamhbS5d29QgsDQxmbE0EVmayShKAoqSQ0qSnvmlM/SuiCE1C9UgSTfzOFmRgapEomMd5uqV4EVYB6BBvN8Hfp41jZqJYBc9+e+zD85YXJGRNSMrbcsqbSy9++CO7a9oD4nb3j847ZXcNtsWLu07oU1C5oJrFz24KjqJ+3PN4sdXge1gLl8JculAyluv/2GTUU2BUJYi47mUhJYdxvbNOoytNBTN7bGmZ5ODLK/FJmKNw5fVvtUWYmY45AdCfaaWLUQhKKG7HcNN0jZv+Sxy9NQf1HP4nw89yE/6UN12cMc3P/2ufXf0i7VVdIX08voVsyue6dZj77rqT2ZP3yqK0vJdz02b9GTXHu9Vb/2AThp3SEJ/0QFk+BjDx2C1UvN6icKHWEor1aHuR0RWmRUBFEQk1naVsILXlBFiL6CDUKLZKrFScnaHeAPzR9Ws14b+skjPhlTJ8L2KtdFd8lgkdOHFWPUD3SWkLljsZaVwiDONAQfLGtWVX6m1xyq0o//+QTtGP+O/bMja+e6h1/H3zw1R3Q8i7v+Q4Z6AUakkHBs1QKzDAI1KLLGiT5j6w0WI9zMW0B2pkJ9uXxD95xTwcdeOHi3shFBKSTH4fewD+EitXuNRnGF2yQjFAACXjWekUEjVqUuNww4hyl7P4t7485erWVufuBTfXofe/9m5r+rkcaOUmO9Q5L2q2XdGVEzwxuyfb8FqIsSQGpfs9ORF4LVZQbGGM7tklv3t4Exmp0v2NXXlKaxthGziQ8fKvDiQmE6RRP9VFAmlOUETDRbPpJb2UhHtPIV2LpQKqGmG9tAU7bVsKUvbMRXIP/EN/VbwnjvxT/wFvv6OZ589t07nb3fgr8LiTLZh+eYwKwYbcUbPpjiMI4KVxREL1f8PWmh3elpLfoI+S1c9oaXQ049pt2m3c8e4D6LLuUnRUDSNWxCdA2sEYI2dsIYZEbupUYY8LGApUEx1DKFbEambWPQCivUDpBfWooirltG9dP+y6MkKUWn4nG/XMCZ6gkvWaYDEQBjPdCQ/FstjeJXn65sUxaRXqAE0G425cCENYBEk4LuTH9bwBv9xwzp+9gjh57K/noszcMI67W16UpoHdlXIKimA7LGSQvlYnajW5CV2IQ9RDphX7C8+FDMpgB5BOexbR2/45BPtbdOrZWe8ZXDdjucf4MVYP4q07EeBkIMd7+NG3ScqZz6FzxLYQ3+2h15EMRXoRl2A2J/twVQHy9VK+sKSS6VghRTs3RXbjClW8fFB+AcEHfj0U9pf2/6JdKLsz+uxvsQd4RoY/xp7YwbLYC8sfQYt4wfQvGE0d9qBNCntDfjC59F29Pi4cVqKzid6fhU/lWXQSc2wGR40IywM7oXyUxoeK2XfuUPYSfeLB4hA2hC9AcELxIWdRZFxFnLyOAG0Qt9IUdgTvINbeeg+cY+o/YHx927AxG8LAyFq5ZMTemarJIUjAVw9xwoZLhbizBDA+PYBD+JSLNIUMPPGgm2mS7Ghp2cTAECvG09hDTcipOaGQiFI0zGtVzsatn/tb/2Z7SfnC0rqXlFNij8jKAl7d+799XcLs/IEV01iQpInT0l11aSkJoO5w59N5h6Bc8zqExJTUmM1n8SURnvPtLNBFTUNgEnEE8hhzTI+AJbnx1zJLEdszni9xNM5s3usQVYAJt+5iFXAwL36IZAWNp85KITP3E35r0499eDsFydxk6Ztr/nC7pwdZ+3x9uyqbRXTx89/s/1/1u2nGU/XPjht4ZzhVJKkqcNG7Xg5eqJ4QmHRTe1uK9+4dMjk6SOPLWOYZzXEAUlKAE1JJ6MN7GVHhvsA+EjI8BQ8YH01iWJczWAMd+uJgOyqV9wuNQHnwPTujOpG2OPSywh2JDkF3Z2LN0CrzDoNst4zyTF5jPowIiDJtLqyy8Zp+7/66o2KzYV2ue2a+1dXPb969rNZUkK0cvhd2jta1Peb9s2dQ9fRjJGTfzzg+5Dys0Yz3RsNuvMO051RRNeYeNDX+ECsSBkRkBYnYAQnS3edNqRFRz8eoMXjUhNBL+JCaqqM5V0GfRKxACIEWHEuHg7NqcYEjbslDEDMg4Ew7Pf6vCbIvbjRv34Zuf9ebvy2uVurNygVO8ZxlbPXH/0PZ849QTveU7ZOEqUFq878PXfvn0umS5L4aEkpLWDymAx0fGrI404dr+vhGeUhxOQhMHkI5pbyMARhsoGux6SR4EYSnKBvVhmU0ZBGnMko6rBCImYROc0L9LKepU/+8sCUDUUV46xdXr5335eVq6umrcpr9/T0qjX0vI/ytGjUEG7BmR9X3z6CBn478OPYEbRh5H1a9ENGxwig4yOQRzzQMYxEvEiCXTJISMWqm8UrxKpuGc1LPIlG+oO7T7QirLZ7/Swtk1WXjLKw2FGhZEMWhE0rBXz61rH+2YZ4/AHdnEZQ2+63jkeFfVXlVV3DPV+f/67223yOm7Hh0UW1NFr0Iw01fFKW+sofvbrd0rs/bU8nimmP7H4X9KkPEFEjdSB+ciuJxDOrwPgjWQAk4WykHFaJCGoDWCyhQIlnExo+rJWEmk0URuJ9TP8QkSVixJLQJVjYvsN6W6ixAacjtT41654M9A06E8JtSsZSTtMq+cMlVesiVstdkmlWeVVJQ1v+MNMTrT9fB/xNJXlkmlEFDIBmmGFzOpPbmpkb9GIVtT1jcBrsL83FsE9mKMZuNl1WoHYAbqcR3XL9co0g25ONyToTcDwZ0htA/2pbe/OKIFOeIr3a0HqnJ6ZIRw/eu7HIUfrDBwOVPum9H7256oWijeX7j1Y+DyqVm/PM9Kq1hkqVjthy7h8f/5odKM0I7Fi75JahtM2v++vH3UH/GFmpNXygx6YqCEtfgI14yAAD41jDuq9yoq9yNvkqb6N9cyE0cZvhp7CCYvMw1ACmTQy8GfNO4HmD+kyHSa6q7FJbuemVymUzZr6YA27ont
ET/vFNtJRbrTw7f3xUYrq+BTaVCfthc76x/BWVBAOl0KIB5dQbUM7GBhQsiQ2oLRUVFUK3c2+K5Rs34jXPP6L1p3lwTSdQ2ZUwsaI0BQvAFZdCMc5hT99VoMp2PTMG2ODSpeoOGfVRXpdJrCKUje2Te+2urr6hYyqefzStkAoV2shS0TqzUnjy3MTq7VZTeqxHtQZ4jHNljlhdFOtCIs6X8XYiYvA11Ud4OyvNMFZfuj4ktlofWlM5hy5/mNMG0a/5pVr/h6SEhpH0gKglRF8VOWf0P7CHJr6mkEbo0XppbUuFlHDmR/jOCsgH5oJdZGGuyHCLKwXrQGgWqCJKXBjtRPGB4Wazi2Xp2pHlYkUPVuJng6hY+lRzcDJE1w8lVQZ1UVLQgBVZVuN86IsCLSoyfqY+/guUyNtcoVaMt3XeUjmrOrPT9gVbdlU+MmfZCjed/tjsuU+lCd1q7hxbOXPq/O//E13KTX/7xa1LTElStIKbfuCl+ROj5pjuHwH6Wuh+I3VoAJfXeo9BjE2+SPf9F+n+OFtndbryauWyeXPWBIVufx8z8fPj0Ync8p0rF02K2pnu48xmAuznorkq+v83V8X8OEllXWNS1KIsAhjm8BEqaecOf6Gdrdz9cvWevRs37ubiAqdwsupU4BftQ9rpl13ncZoq8Bo6TaOes1obJYiwN4ylQ4kBa6T6ZuyCWApJQCwAybrtcC5WJGyOaWRO5xpgGrt0AabxGJxrxDSJtCWmKXV22cRAzdRNXdqtmrZ63fqq6c9ka6PELzYOK4lhmttvin7IbRtadmK/7wMq3DtC9/Gj+A+M/d9pZOm4/yYfnwKZg63gAgwA4kaY29K/IxW2RixglplbbwULFGGJs3UsMLm6S9zYiqINkxgWKH+2fbtn7m3EAnfcvuZsNpc/6FbEAj+V/pVzD52infsw5q+554EOF+RcTd5R76vHxYGKyI2tBsizcNrHjf4jjsTuWQAO+3TLMuUwxbzHWVA10Z/ncA2d8kS60K02bky5SSiX5k6O+mC9SYA9VsN6Hci8S9SL6GXrRaT1epHPD7gKC0YOI+80p8vuWjFODuI0mJIlKwmx+hFx+BpH0HUXHBtBb71+xMr1RZ0Bz5vUygVPz16377WPN78yvoyb/My8Bx6Y8tIbe7+sfbN8PKXtpPvGTb35xqmZuQ/NmbVp2O3zAd4PXTjlxv4lWXlPzVtcPXLoDInxPPv8T9wUcRDgl9tIxIM8iItBF1GHLqbm0CXWYYpvHC6Nt7SELtgMRHBAZMWpAxhZnwdrhruyC+Xs16f//POA3qlFme602/OmzgX4Qn3aTyXRq8YNFaWhdsfjz3FvwP5Wgow+F7rpfgwtUy+3SmZjk1iE8l5QhFLsrDDJ/BirQ8msKoklFSqx2kqzqlRRI6rNXlm5eNaStRmV46ydlcpN++hb3L3RZW9unjGe5869qd55N8aN9uBX98N+mtWl6JXrUu1n0dyglE2zZ2mlo4RuDZ/NncvnnXsTvno1IeIBuJ6PfGPMHjmcEIfwojXUhH2GVktT3sbS1L6bfj7dSmnqtxPvtihNWUS9NNXzvVND9XmEOEiD94qKHSead+7bd/IelsuaXDVmkwVy2cbSFfzZLJeFc5jLbufMFptew4J8treVM8HfjmaVLCO51YtYBjc8wI3Yq1FcCF4961A7Kfz93d93ljocnKUdLPulQOp44m6hWzTrjTe4L6NZb77JfXnuTe74669HU4ArIeB/LfCrZd2K/nd1qxCdqz3xCA3SrEe1J+ich7X3tPe4HM6jXUt3Rk9Gj9D3tTCsEQTMfIjJxJiVh2tjh9UeVmVEyfEFyHwgTW4uaJAz0yID4F5Fg4tou2yJXveglpv74HxfD4cjrjBu4MhAMSjAT/P5p88lTlppEcdw4uS/Lme2iDc3bGG61aKehU6IN/139axh3MPRJbwzOoXbM4SfeffQhoVGPauvNoFbKfUkaeRGAuZc63eQRCGPzQhBbLMU1JrZCTajk8wwKHYvIM3NYJT6gZ8ebPpTGY3b4lZFux4OWABjdo23gsQK+ya9rt/3/imrXkmae9/wO+4YXjEv9ZVVU7j0sQ/OPL7pVNGgdoceOz5pbVbOuonHHjuYe1PRyZePzVjK9hrRfqV+ViNLIS1bpa569mOUy8ByI6Xar9LuM33Y9yxA450xGtMKaolOo79AjQcaHQW1ziYa+TrFqvep3QaNfhIbbIjHqKc43KrVzWjsRRmJOkkoXpbH+1g+L5kscytH3nXXyPvmJu14rryionzVK9qu3IOPHStfmxlcO+X44++0G1R0atPxGYvHLp1x7OWTRbo8HqPVQj3vIYnkJoLo3GKtR73iUb+SGLHGXWnM3IHmZCyuJyKIZJNQFuylk0S2W1XywG8eQrTdmCbEEKjHE7+edLHk0fdY1cy/Pjn0qvHFAyaUrJ0+5IkhvSd2HXQP/eKBHTfcWByeV+Kcv+u6QV0Kp4/R9zjjvI3/TswmQTJDr5UoaWE1XqyPBJj7D2QY5RK8OcEJpwWWUQniRRWTDL1vns6yGoyWRgklSa5HKWAJJT0D6MEyl15CqbHaEpP1yFjY2d3yfqymKko8uyUrm5vxwd8rq97l+cYyynhO+MdTlbvf58y5R2hOwldfyu+tblZIWbrP/d1xP80BGvH+wo7sXqJn9fuI1FRIlxJDEQnTeAdfX0toimTPU9xhVn/1hmpsKZIZKAyy+1Nk7DwzdMATnLfgUyzoOxUfYoM2QHCbAoULs5QfFC0ePh3fhgVML346Ppl9Wkfe7no1E6ck0KoTEXmrksMAvWGeybTxjjScKQbJmnBmPtyLFuZc867tH5HXd/F8+dLK2U/Y6D7talM4n6cNg63XXmviFpTRtu/Vf7hV+ttSZY12uEwZv693aanz+0ol1kNaDvYWjxUCR7M6fa1LdhA7G4BzIYIM1Xp97ARAAy+vQwM/wiGkzc7GHSN2NppgtwFhUijiYJmfwwV/eUMMKtsdsVq/r0WtH0jx6bUNcGX4r8MyWk03LtOK6b3acPqiNrxCv8GQThWVaAfu06hctq1M20mvhV86jl8revgs437XHiTWNVeJnWEWvS/WOOeJVeYErNizRjqWzOGvxn5YGBnrW7uVtt0ielbDf1jhHn/+J/EP8QDEHj8g1FV6/FedDmPa0QcHmQwx4gGrvGWCidSG8yyZkAiH4WxemN3wWIAW0oXtIs5F8vTRxwT9Zj2lrUvN18dqO8Jf6SGlowtxbq3EPqkW4e19bWX3DovTx2emhPXx7TzZvV2Kc6eTjrrR6C1kvQnf7NiYMW7NksBLjKdVtC3NoVXaaO0L7bBWchudSAVK6WRtuaZpDdqTNGnHM09uELjhk8ZNmjVz8vgJwznhxSef2cEdod2pot2kHdQOaANphPbQ6rW5dD71Ux/E3PnatorNn1c9JU2ZVD2/cuGLE6ZJT1d9xmQ2k6zle/ObiASZIU65YqA2fs2kOfdoJ6j3HkfsgEv10JnaTG0WnWkcXHB/EWlx9xCoNSkDmf1qyCxEuuNM50VSqwWQgPPNe
NdlJyahToD0lbah2sTu7I3ExvstL5BXCCQUDikhFxNLu/YA/FPBVwfbhkJKagux4S2YRSHIA1BsGXh7oTsV9D8HhNcJpwKDxUpYrgUREnxT6Y43GFxGjpfoo+fRRBq7naTMkOYakOYRXZqTIAPj6CQmzai2HKTLPVn1l759e5gtZVbhxqG7tg8aP+Le568kzehA/pY5M/relZY4rn/Xtn18Lt/NuV1uvUF7ju65+frb9L7xNGEXPSK+CRJor1tiLblEj0flMfByen6fTMN+ftqHT/Jn4PtWSWvAa5VoA+hKuKoTpz5MDP7H1SvOWIBnd6uY6motumgsLpU37s5m96dIRL8P2CTrFVU9ySoKG/OWJcNmDh6bekfcoNFVT2qrenYv7mCe29syaPDwiUw/F4B+DojpZxE6Kh/Dk/BrAfVqJ+6hOdqRTxqP1tKFdJG2yKMtajzQ50vZHKspnc2xui47ySoX6Gltq5OsvAf4c9E4axEyrPlMKyU68/SZmaGwLq56xclF+UqTi+6LJhcpbqjZ+GL0XX0vxhCj5DOkiLw8BC8FsBeBmEkWiYgYaSQG7ywFiljHCj7YDjaLLKE31MFGAecdwqveUWlc7sxPxoAcr88tmTqzulIG6dnq5FKgtcpSm9g90YKN3RN9heElRuelJ5joZNzgFeeYuC90dgjGvpONe7+DpKyVnWNJLCOspkL8CoRikMogIwVcS7oewdIZwKoN6n8Fm0hEXJWRjiTKCbYrkxiLepemcjbGwysSyeezgMnpsyMgbxmQRffWpkf8rU2PJBhZe8Tp9hUXtz5BwqTRcozkLRTARcMkYodG/eON/YA/gMwukZRcvCMcZ4kPqx5gOD4dIqn59tCX+3QW+9ica22i/ldi09YRo8djrcwpXWLjMR632PtnyNaLtz4/hjtYv1v8GvQbrI/8j37Xl+IP6zO6mdb6iKux490uzRXreHdi2w/A9gMXd7wDLtxtREjKwY435nq+kBq6oOOdkC8oSXtF1Y8db1+zjrfPVRPv8+uPpEhMSvBgB8vfrEoA51jH2xefmKR3vP0J8YmNHe+A0fFOtgFscaVltu+AsEXxymp+AWt+411C3mSj+W33tNL8zr5s55uFkWbtb6m+ttX29x9MaZp64NP3tNYA52+OKRGv9ytBFtivzCQjrtSxzGqtY5ltdCy3Y8cyI/i/7VkyIi/XuDzHqLtk95K+0sw3PwuBVhPfbumb6X/lm5/VfbOwm13uXB/sT5HYcxoSxKMX+uYWVf/L+2bjeRVXKPwzb9B69Z+2ZX75cj0AbkPMJ+v7PdDok8c223EqeohAGO9tUjJCzQj4v/HKlyYu5jFap68L88iXJe+s7kbw/jespYKMPSQB51YvUU1NvEQ1NSnml2WvHwzyv6qoMslcWFa9k6nlRcVV/iddDryxT5x594MkFly4Ux+KIhEyUDuO6TRtPCW28RovT/A24cYEr4mKmuQ4C7yVoL+VUFCbrOd92GdKwCKXLOm3J1yRtJhcLqBuIvPlFxEn9GZSiMX9UUzHAiSHXN8qYmnbmlW0M6xiByKWNsFsfYRYzcy64uQ18xTBInilwUtH91/qFvG/l/1KzU9w2uEpVw7zNiqCvCQq6E7EsB/JcjFtLSz+8rShxbdC26XtozltrdvISy3puqyxfN6Sphhm6A+YwU9ScSb/YhST1hqKSTesZTugmITEFKQnTlaTki8HaAwqWuKa61vs/mKUMLL5jpntCFbxNMHKYjr2dC5h5RmXsPKAse9asPKkNGPbDtz25c2huRguMIlvW1JwsW2ktGA6Jc8Lx7l3xTqIRHns2Scie76YLOjBCJJH0UvMYLTWWKlfv3eosCgMiXCO6fnvSr4vr94gHPcd/dbNxiTA920SltKz4iesDnAjwYK3XgxWfAW1vJFGJsQy/CQ9wzfSd3wmDoZudxz4BwuPrPBByg6JZVO11dfsKUh6dN5017V9S0b3u65kYGF2VjiclV0otu83Gk6MGHFdTudw27aFXZDWMuEUdx5ipAd3BdhMEtmwBi/G+vO1Hj2t9TAx1Vr1cgJrbeHUGc9G59i8EClWeZeRM+q7aioAI2gqmzD46vWF+X1umnTLDSu7FPQW6e33Tbq+yDtk2qRru1y+jvK/f+9FbqvwHST7PPCddRv4en2ItmnqFb7yotCL21qG87FLuK3i3it+fonY1fj8cCFEZfZco8Zn1MSeakTY4Dt7Ro2o3x7Dvu0J877hk6+7SghtpV21t7fq+7zMdS7zrJvhV1VMhi923FGjvW9c53wHKlH+v76Onz3+bnjnijGfUut7+zS8LwP2wpmNZ+z1YRZw0RP2dNoU0cUqKDbjLiCDTEWS2egGu+k0RnK4kfB5zYg3WKCvab/8msYt7bHH+RlrGqRgeUUqVqzslqiWz/ZDJm1vxiiDXTgT0oX+Qd3/V2vqrDTWDFeO2di5cswhmrN9m/YpfAde0Z/jPS93s+cJYSWmn1EREczhMD4KQBUtoVCzpwvFxZ4uZJSJ8UkHism4w87beBegAQXwZ9dSKi8l55euZ//pOjGBrKUNrIYUIFQxxVyYTZ8XN8cEJ+jCYrXPCReVPOE6pXCd31teR+FCxqWarkPxOkapqrSVyhTb002Asd4TD4KHhXwyBwnOMB6dptjCqszjhGItoTlWO8Na2PpIxmcpshP4GEUeM8YaR44VeyHtC5TcOpWTsP4JMvImABdTc7F+lIodjvhQJJc9zSWXWLAThLVRlGOHZg9pseNDWuzGQ1p+nfzGNL197WAPabFjr3rn6bq951j6aXPVxEFamKe4XDVOlwPST/izWfoJ5zD9hICGqactzulq1o/OYNVWfbQyiOOV5ILxSvavecbVk9700ksvUedXxZN7W7pM6br5bS4YPYo/724qLu9s6XJf96+0U5yvbGNZ1mkadDnHuTw/vpUDf3rePCHLY50u2uZ3jx6HRvHPCNew+3X8pFKvjELOh0+w1MMR3/iAL3zWjtnpgfScRSapzng+W+t38qArAA2o9evRy+/C2bpaZ1P0ciG6tdoNPBVgD+iB7M0D/+Aohw/yJnkUnbfiBtpx5CZp65C/SM+HX5TE8f36ae3pP7T2XKI2lFZHf6BzqTaPPka1qUyPEPh1Zc/UIJ3kgIzH597+f+LPPhMAAHjaY2BkYGAAYqY1CuLx/DZfGeQ5GEDgHDPraRj9v/efIdsr9gQgl4OBCSQKAP2qCgwAAAB42mNgZGDgSPq7Fkgy/O/9f4rtFQNQBAUsBACcywcFAHjaNZJNSFRRGIafc853Z2rTohZu+lGiAknINv1trKZFP0ZWmxorNf8ycVqMkDpQlJQLIxCCEjWzRCmScBEExmyCpEXRrqBlizLJKGpr771Ni4f3fOec7573e7l+kcwKwP0s8ZYxf4Qr9of9luNytECXLZJ19eT9VQb9IKtDC+usn8NugBP+ENXuK1OhivX2mJvqmRM50S4OiBlxV9SKZnHKzTLsntNhZdrr445tohAmqEsfpdeWKbffFKMK+qMaijYiRlX3MBRNU/SVfLQ2jkdr
tb+DYmpJZzOiiYL9kp6nEGXk4Z3eeklVdJYpW6I8Xcku+8Ie+0SFzXPOfeNh2MI2KeEktSGP8wc5Y7W0WZ5ReWqU5mwD9f4B+6xb6zxj7j1P3eflW+E79+N1ukyzaV9kkz71+Beq19Dlp9msejgssDW1ir3S7WKjOO0fkXGvmJWujHq5HWdvWc0/pNxfUxWKTKRauBgm6YszTnXQ6mvI615TGOdaktNIksebePYEzZrMG88g326eeyVfMcMxSU6qk3uxt0uMy8OTUKA1PIN0g/Ioqe/W//BB7P4Hi9IeabvO5Ok/0Q0mU9cZcJ36T2IayfpmcUHU6a0K5uI+30inaIm/adUcsx802E74C0holcIAAAB42mNgYNCBwjCGPsYCxj9MM5iNmMOYW5g3sXCx+LAUsPSxrGM5xirE6sC6hM2ErYFdjL2NfR+HA8cWjjucPJwqnG6ccZzHuPq4DnHrcE/ivsTDx+PCs4PnAy8fbxDvBN5tfGx8TnxT+G7w2/AvEZAT8BPoEtgkaCWYIzhH8JTgNyEeIRuhOKEKoRnCQcLbRKRE6kTuieqJrhH9IiYnFie2QGyXuJZ4kfgBCQWJFok9knaSfZLXJP9JTZM6Ic0ibSTdIb1E+peMDxDuk3WQXSJ7Ra5OboHcOvks+Qny5+Q/KegplCjMU/ilmKO4RUlA6Zqyk3KO8hEVE5UOlW+qKarn1NTUOtQ2qf1Td8EBg9QT1PPU29TnqR9Sf6bBoeGkUaOxTeODxgdNEU0rIPymFaeVBQDd1FqqAAAAAQAAAKEARAAFAAAAAAACAAEAAgAWAAABAAFRAAAAAHjadVLLSsNQED1Jq9IaRYuULoMLV22aVhGJIBVfWIoLLRbETfqyxT4kjYh7P8OvcVV/QvwUT26mNSlKuJMzcydnzswEQAZfSEBLpgAc8YRYg0EvxDrSqApOwEZdcBI5vAleQh7vgpcZnwpeQQXfglMwNFPwKra0vGADO1pF8Bruta7gddS1D8EbMPSs4E2k9W3BGeT0Gc8UWf1U8Cds/Q7nGGMEHybacPl2iVqMPeEVHvp4QE/dXjA2pjdAh16ZPZZorxlr8vg8tXn2LNdhZjTDjOQ4wmLj4N+cW9byMKEfaDRZ0eKxVe092sO5kt0YRyHCEefuk81UPfpkdtlzB0O+PTwyNkZ3oVMr5sVvgikNccIqnuL1aV2lM6wZaPcZD7QHelqMjOh3WNXEM3Fb5QRaemqqx5y6y7zQi3+TZ2RxHmWqsFWXPr90UOTzoh6LPL9cFvM96i5SeZRzwkgNl+zhDFe4oS0I5997/W9PDXI1ObvZn1RSHA3ptMpeBypq0wb7drivfdoy8XyDP0JQfA542m3Ou0+TcRTG8e+hpTcol9JSoCqKIiqI71taCqJCtS3ekIsWARVoUmxrgDaFd2hiTEx0AXVkZ1Q3Edlw0cHEwcEBBv1XlNLfAAnP8slzknNyKGM//56R5Kisg5SJCRNmyrFgxYYdBxVU4qSKamqoxUUdbjzU46WBRprwcYzjnKCZk5yihdOcoZWztHGO81ygnQ4u0sklNHT8dBEgSDcheujlMn1c4SrX6GeAMNe5QYQoMQa5yS1uc4e7DHGPYUYYZYz7PCDOOA+ZYJIpHvGYJ0wzwywJMfOK16zxjlXeSzkrvOUvH/jBHD/5RYrfpMmQY5kCz3nBS7GIVWxiZ4c/7IpDKqRSnFIl1VIjteKSOnGLR+rFyyc2+MIW3/jMJt/5KA1s81UapYk34rOk5gu5tG41FjOapkVKhjVlxDmcNhZTibyxMJ8wlp3ZQy1+qBkHW3Hfv3dQqSv9yi5lQBlUditDyh5lrzJcUld3dd3xNJMy8nPJxFK6NPLHSgZj5qiRzxZLdO+P/+/adfZ42j3OKRLCQBAF0Bkm+0JWE0Ex6LkCksTEUKikiuIGWCwYcHABOEQHReE5BYcJHWjG9fst/n/w/gj8zGpwlk3H+aXtKks1M4jbGvIVHod2ApZaNwyELEGoBRiyvItipL4wEcaUYMnyyUy+ZWQbn9ab4CDsF8FFODeCh3CvBB/hnQgBwq8IISL4V40RofyBQ0TTUkwj7OhEtUMmyHSjGSOTuWY2rI32PdNJPiQZL3TSQq4+STRSagAAAAFR3VVMAAA=) format('woff'); +} \ No newline at end of file diff --git a/plugins/UiConfig/media/css/button.css b/plugins/UiConfig/media/css/button.css new file mode 100644 index 00000000..9f46d478 --- /dev/null +++ b/plugins/UiConfig/media/css/button.css @@ -0,0 +1,12 @@ +/* Button */ +.button { + background-color: #FFDC00; color: black; padding: 10px 20px; display: inline-block; background-position: left center; + border-radius: 2px; border-bottom: 2px solid #E8BE29; transition: all 0.5s ease-out; text-decoration: none; +} +.button:hover { border-color: white; border-bottom: 2px solid #BD960C; transition: none ; background-color: #FDEB07 } +.button:active { position: relative; top: 1px } +.button.loading { + color: rgba(0,0,0,0); background: #999 url(../img/loading.gif) no-repeat center center; + transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666 +} +.button.disabled { color: #DDD; background-color: #999; pointer-events: none; border-bottom: 2px solid #666 } \ No newline at end of file diff --git a/plugins/UiConfig/media/css/fonts.css b/plugins/UiConfig/media/css/fonts.css new file mode 100644 index 00000000..f5576c5a --- /dev/null +++ b/plugins/UiConfig/media/css/fonts.css @@ -0,0 +1,30 @@ +/* Base64 encoder: http://www.motobit.com/util/base64-decoder-encoder.asp */ +/* Generated by Font Squirrel (http://www.fontsquirrel.com) on January 21, 2015 */ + + +@font-face 
{ + font-family: 'Roboto'; + font-style: normal; + font-weight: 400; + src: + local('Roboto'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAGfcABIAAAAAx5wAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHREVGAAABlAAAAEcAAABYB30Hd0dQT1MAAAHcAAAH8AAAFLywggk9R1NVQgAACcwAAACmAAABFMK7zVBPUy8yAAAKdAAAAFYAAABgoKexpmNtYXAAAArMAAADZAAABnjIFMucY3Z0IAAADjAAAABMAAAATCRBBuVmcGdtAAAOfAAAATsAAAG8Z/Rcq2dhc3AAAA+4AAAADAAAAAwACAATZ2x5ZgAAD8QAAE7fAACZfgdaOmpoZG14AABepAAAAJoAAAGo8AnZfGhlYWQAAF9AAAAANgAAADb4RqsOaGhlYQAAX3gAAAAgAAAAJAq6BzxobXR4AABfmAAAA4cAAAZwzpCM0GxvY2EAAGMgAAADKQAAAzowggjbbWF4cAAAZkwAAAAgAAAAIAPMAvluYW1lAABmbAAAAJkAAAEQEG8sqXBvc3QAAGcIAAAAEwAAACD/bQBkcHJlcAAAZxwAAAC9AAAA23Sgj+x4AQXBsQFBMQAFwHvRZg0bgEpnDXukA4AWYBvqv9O/E1RAUQ3NxcJSNM3A2lpsbcXBQZydxdVdPH3Fz1/RZSyZ5Ss9lqEL+AB4AWSOA4ydQRgAZ7a2bdu2bdu2bduI07hubF2s2gxqxbX+p7anzO5nIZCfkawkZ8/eA0dSfsa65QupPWf5rAU0Xzht5WI6kxMgihAy2GawQwY7BzkXzFq+mPLZJSAkO0NyVuEchXPXzjMfTU3eEJqGpv4IV0LrMD70DITBYWTcyh0Wh6LhdEgLR8O5UD3+U0wNP+I0/cv4OIvjvRlpHZ+SYvx/0uKd2YlP+t+TJHnBuWz/XPKmJP97x2f4U5MsTpC8+Efi6iSn46Qi58KVhP73kQ3kpgAlqEUd6lKP+jShKS1oSVva04FOdKYf/RnIMIYzgtGMZxLnucAlLnON69zkNne4yz3u84CHPOIxT3jKM17wkle85g0f+cwXvvKN3/whEjWYx7zms4CFLGIxS1jKMpazvBWsaCUrW8WqVrO6DW1vRzvb1e72so/97O8ABzrIwQ5xqMMd6WinOcNZrnCVq13jWte70e3udLd73edBD3nEox7zuCc8iZSIqiKjo9cExlKYbdEZclKIknQjRik9xkmSNHEc/9fY01Nr27Zt27Zt294HZ9u2bWttjGc1OHXc70Wt+tQb9fl2dkZmRuTUdBL5ExrDewn1Mq6YsX+YYkWOU23sksZYFqe7WqaGWapYtXfEp90vh3pH2dlViVSvy7kkRSnM9lH5BXZ8pBn+l7XcKrOvhzbaTm2xe8RZOy1uwak2imNvGn0TyD9qT5MvZ+9pMD2HUfsWy2QlhntyQyXYV+KW3CWVU/s0mJEba4Y9SZcv6HI3Xd6hy9t6yr6jYlfOOSpMVSlSVdVcC51jIVX5Df2ffCT5OLIN1FCt1JVZY9vnjME4TKBDgprStxk9W6ig0lXQmSfXWcC4CGv5vh4bsZn5LuzBf9g7VD4rKBcVbKBq+vPUmEod7Ig6WZo6owu6oR8GYIilaqglawT+w/xm3EruMWo8iW+p8x2+xw/4ET9hHzKom4ksnMN5XMBFXKJONnKQizz4YZbmCA5CEGqpThjCEYFIS3aiEG0DnRg74sQyxjHGMyYw+jjjIj8KojCKojhKojTKojwqojKqorE/z+nO2BO9MUb5nXGYgMn0nYrpmInZmIuF3GMLdtB7J713830v/mvJctXYflBTO6Vmlq4Wdljpdpj/4g/OOEzAPEt3FpBbhLV8X4+N2Mx8F/bgP5yLp9LTVMqgytdU+ZoqTzvjMAELmC/CZuzCHvyHffGqaZlqgmSkIBVpluk0xiRMwTTMwCzMYb20IuRTLDpZsjqjC7phAP6Dm/EI64/icTyBS+SykYNc5PEOfHCRHwVRGEVRHCVRGmVRHhVRGVU56yi/wiSFq6y261m9r1/kMOulwRqmUfQtyt3S1Rld0A0D8B/cjEvIRg5ykccb9cFFfhREYRRFcZREaZRFeVREZVTlbLT68emHkREchKA7eqI3a2Hy2Xq5eAxPgndPvgmSkYJUpLG/MSZhCqZhBmZhDuuuuqu0eqE3+tlqDbLd8jOarXYEByHojp7ojcG22xmK4RiJ0ZwJCe/NrRSxN/pFFVdhyb60bMuyzXbJXrNVlq04e8TuVVBhp0VYsn0S5P6T3nhKrpKCrp9qP1gan7daSjD1/znsjDdmSMpvWQGrZAMyL3Nbwu5Qonx2j70vH+MzZCqKrD1nhe0/ds522Xbzkdlnx6+5e0pgd7x9bdaW2Vv2qf9pyeb4M+x7xj6WpHz6u0gEYRevq7vQjvtftzNXs5aNxvqbsNS/XcmmBmHfev8pgvEFlML3OHh1nfG4nRVhaVc+EwL+XnZek0m3k3Y341tKUpLttxNy5dq9ircaImsp9rnt432+ZB+y70rwVqlsGd7sB2wQWbwvwo56K6fpefU+3n7Fw8teH3ZehL2hGwrLvrGddvL6ftLfzb23f0E3FHazgguvny2+Mj8XsJ721786zgWE/Q8XFfh3uJB8lq6AsA3IuDLbF7Dq7Q8i6907+Ky4q7133XyzN34gr4t9aU9fsz5QwUWIGiiCR4rlceTjCZHLE6oKqqIwVVd9RauxWpLroE4qoi48xdWdp4T6qL9KaiBPWQ3lKafhGqny2srzB6PljBAAAEbh9+U6QJyybXPPWLJt27bdmK8SLpPtsd/zr/dcdaRzuX3weR9dvqmfrnUrfz1hoBxMsVIeNjioHk+81YkvvurBH3/1Ekig+ggmWP2EEaYBIojQIFFEaYgYYjRMHHEaIYEEjZJEisZII03LZJChFbLI0iqFFGqNYoq1Timl2qCccm1SSaW2qKZa29RSqx3qqdcujTRqj2aatU8rvTpgiCEdMcKIjhljTCdMMKlTplnRuZAJ87LVl/yp7D78f4KMZCjjr5kYyEKmMvuoDGWu19rpAlV6GACA8Lf19Xp/uf89XyA0hH1uM0wcJ5HGydnNxdVdTm80YAKznTm4GLGJrPgTxr9+h9F3+Bf8L47foQzSeKRSixbJMnkSverlDibRndmS3FmD9KnKIK9EbXrWI4U55Fmc0KJ7qDDvBUtLii3rOU3W6ZVuuFpDd39TO7dYekVhRi/sUvGPVHbSys0Y+ggXFJDmjbSPzVqlk8bV2V3Ogl4QocQUrEM9VnQOGMJ49FMU79z28lXnNcZgFbzF8Yf+6UVu4TnPf8vZIrdP7kzqZCd6CF4sqUIvzys9f/cam9eY9oKFOpUzW5/Vkip1L9bg7BC6O6agQJOKr2BysQi7vSdc5EV5eAFNizNiBAEYhb/3T+ykje1U08RsYtu2c5X4Nrv3Wo+a54eAErb4Qg+nH08U
UUfe4vJCE21Lk1tN9K0tLzbhbmyuNTECySQCj81jx+M8j0X+w+31KU1Z7Hp4Pn9gIItuFocAwyEPkIdk0SD3p4wyWpjhCAGiCFGAIUz7OghSo4I8/ehXf/pH5KlcFWpUE3nBr8/jPGIYi5GmJmjiGCsIMZcC7Q8igwAAeAE1xTcBwlAABuEvvYhI0cDGxJYxqHg2mNhZ6RawggOE0Ntf7iTpMlrJyDbZhKj9OjkLMWL/XNSPuX6BHoZxHMx43HJ3QrGJdaIjpNPspNOJn5pGDpMAAHgBhdIDsCRJFIXhcxpjm7U5tm3bCK5tKzS2bdu2bdszNbb5mHveZq1CeyO+/tu3u6oAhAN5dMugqYDQXERCAwF8hbqIojiAtOiMqViIRdiC3TiCW3iMRKZnRhZiEZZlB77Pz9mZXTiEwzmNS/mENpQ7VCW0O3Q+dNGjV8fr5T33YkwWk8t4Jr+pbhqaX8xMM98sNMvMerMpfyZrodEuo13TtGsxtmIPjuI2nsAyAzOxMIuyHDvyA34R7JrKJdoVG8rx9y54tb2u3jPvhclscpg82lXtz10zzGyzQLvWmY1Ju0D7yt5ACbsdb9ltADJJWkkpySUK2ASxNqtNZiOJrxPv2fHQJH6ScDphd8Lu64Out7oeujb62gR/pD/MH+oP8n/3v/PrAH56SeWH/dDlxSD+O+/IZzJU5v/LA/nX6PEr/N9cdP6e4ziBkziF0ziDbjiMa7iOG7iJW7iN7uiBO7iLe7iv7+6JXniIR3iMJ3iKZ+iNPkhAIixBMoS+6McwI4wyGZOjPw5xFAbgCAayMquwKquxOmtgEGuyFmuzDuuyHuuzAQZjCBuyERuzCZuyGZvrfw5jC7ZkK7ZmG7bFcIzg+/yAH/MTfsrPcBTHcBbPqauHXdmN7/I9fsiPOAYrORrrkQaa8FG4aSvBgJI2EBYjnSUiUwMHZJoslI9lUeCgLJYt8r1slV1yXHYHuskeOSLn5GjgsByT03JNzshZ6S7n5JLckctyRXqKLzflodwK9Jbb8lheyJNAH3kqryRBXssb6Ssx7jmG1cRAf7EA00sKyeDgkJoxMEoySSHJKYUdDFCLODiiFpWyUkrKORiolpcqUlmqOhikVpO6UlPqSX0Ag9UG0kwaSnNp4a54tpR27jHbSwcAw9WO8n7w2gfyYfD4I/lUPpbP5HMAR9UvpLN7zC4ORqpDHIxShzsYrU6VaQDGqEtkKYBx6pNAf4l1cFaNc/BcjRfr9oVySE6A76q5JDfAD9UqDiaoux1MVM87mKpedDAd8CAEOEitLXUADlC7Si+A3dVnov3sq76QGPffTGbJAmCOmkNyAZin5hEPwEI1v4MlajWpDmCp2tDBcvUXByvUGQ7HqDMdrFRny3wAq9QFDkerCx2sV5c52KCuEz2HjWqSTQA2A/kzOdj6B09lNjIAKgCdAIAAigB4ANQAZABOAFoAhwBgAFYANAI8ALwAxAAAABT+YAAUApsAIAMhAAsEOgAUBI0AEAWwABQGGAAVAaYAEQbAAA4AAAAAeAFdjgUOE0EUhmeoW0IUqc1UkZk0LsQqu8Wh3nm4W4wD4E7tLP9Gt9Eep4fAVvCR5+/LD6bOIzUwDucbcvn393hXdFKRmzc0uBLCfmyB39I4oMBPSI2IEn1E6v2RqZJYiMXZewvRF49u30O0HnivcX9BLQE2No89OzESbcr/Du8TndKI+phogFmQB3gSAAIflFpfNWLqvECkMTBDg1dWHm2L8lIKG7uBwc7KSyKN+G+Nnn/++HCoNqEQP6GRDAljg3YejBaLMKtKvFos8osq/c53/+YuZ/8X2n8XEKnbLn81CDqvqjLvF6qyKj2FZGmk1PmxsT2JkjTSCjVbI6NQ91xWOU3+SSzGZttmUXbXTbJPE7Nltcj+KeVR9eDik3uQ/a6Rh8gptD+5gl0xTp1Z+S2rR/YW6R+/xokBAAABAAIACAAC//8AD3gBjHoHeBPHFu45s0WSC15JlmWqLQtLdAOybEhPXqhphBvqvfSSZzqG0LvB2DTTYgyhpoFNAsumAgnYN/QW0et1ICHd6Y1ijd/MykZap3wvXzyjmS3zn39OnQUkGAogNJFUEEAGC8RAHIzXYhSr1dZejVFUCPBW1luL3sYGQIUOvVWSVn8XafBQH30AbADKQ300kQB7UpNCnSnUmfVuV1TMr1pMaCZW71Si7KoT82vrNi6X1SVYEa0ouNCPLqFJ8AFyIIN+T/dgzE0iUIokGJTUO69KpuBMMvmulUwJ9if980h/ILC56jecrksQA2l/AS6aDaI5OFmKat7bdan+r300lAkD0LoNugWfkJ7RNiFeTvHgv7fG/vdo5qh27UZl4kui486bLR98sO/99wOBPNFG3DKAyDiqC6qQppEoQRchTTUFVEFRzQH2NsFt90m8QUejsbgE6/BWmkLX4fd5vAECkwHEswxtfUiCghDaGAYwpgatwgYKG4TlUKoH9digHpejYQwHP0NtmJaogVAjkyoG1IZ8r3gbHWBia+bwxWhFrRPgrS2gmhU1Xr8rIaCCoibqM404fhfD7va77C725xP4n8/h1v/cApslQXqrW0G3H9DSgVJs2L2gO5q7L+9+4ssON+52W74RzR3oLVxHh+O6fBy8GDfTgfxvMd2YT4cTNw4GQBhT1Vq0yuuhOQwPSW9hYllqBE5hgxQuI0mxcHotihoT4K3CW82O9wQiilY3PEpR1KQAbz281Zreu8KESvd4PR5/ekam3+dISHC40z3uFNkRnyCyQbxscrj97LIvPsHXNkPoPXft+Y/2b31x2973c7Mnz1qAbbY/e/y91XvO7l6Zm1OIk/8zy/fo6S2vnom/es1ZcXLp69PHDJ86ZPLGEcWn7Pv3W788tLhwFkiQVfWtlCMdhFioBx5Ih3YwJSSrwMQTamR1s4Gbycq1JyqgRqVpVrEaNp/TEsMjt6I2DLD9Zj+0ZuHphorW5t5I87t1jfSnaZmCm//KTGvdxp6e4Wub4GCCulM8fqcupd+f7mEMYHpGsn4lOfIC50byojNra86C17bOnVeyqHfXTr16ru5J7t+K8rattJLPdO7Zq0unPtSURQ5niUU5JdvzOs3funWx6elhg3t0eXr48O6Vp3OKty3ulFO8dbH8zLAhPbo+M3TIc788JmY/BgIMq6oQf5EOQCPwgg8W/IUeNGCDBjWKn8gGiVwpUhpwpdCaWRrwTkhpxjulWQrvrKFJe+iWuqEuwVqXE9FA0ZLwHk+uJKuuWoy8sJpwojK5mnC6uFqYMIMphcnp9sqMusZS20w0ca0R4p2ZGRkhooa98Nqgxw5sKzzQZ+xIfPzxrdMD5YO6Hn7+PKV4cdU0usG1dW3KpEmPtx36ZPeBuDBLfWHS8k6vf7BzQe8Xuz9DZ87bVLXt9oTHOnz6xDgsTpw+b9Iy4fOBy//VutdD/6fPWEB4XnRBUPc5SsjjSNUeh4HlPibomIsvSivocvwEEBbQZuRFeSRYwQJqnTRV1DffZst0ykQwKfYEp8njJQum/jjXs3KvBZf2eMGzYGoFeeZ
T3IzPdZw2jqbTz3rQWfRmycDxXXfgcwAIHvbOzFrvxHhCTN4Mm92fTog3M8FmI5kv/DTfu24v6b1hsHf+D5NJh0/o8/T1LuMn4U+YlnwGs7BRt/FdaAkdCggNyCChh6RCHUgO7bvIdlfU9z1QlwWSRNXCektaIlsqNVNi7jnVKdlNguDFrvRMK2xlWRuFTVvRk4dm7Hl7pnCx75px2Ju+Mqbo3/Sn/phMv/w3R/40rBTTxXchGuoBe5kKuvuQMWxfurtzuKxuK3N2Vh/ZiIV0xB46Agv3CLE7aTqe2InFgNCQlmM6XAUzOPmbNPFeEOEvBc6yV3ct8XJuVn/xnSG0vHPO4q0rhh3jOFJJEokl74LAOGQ7p2GkY2ILk1iaiF+RpDWAsJzFsUlwmnFdP8SMiTFj0p2hFH4qk0crBw9Xy9tn339/dvtBrR95pHWrhx4CBFtVjqDokdAODFpkKGRPOt3o27WJDNw4U24JQGACs8IoZoWxbL32oRWj2M1R7Oaws+I2GKVoVjR4pkgpFOJOIYJfsfna2uxe3S5MVt2dZIpR5RVfXxfLv/u2XNg9v2DZPJK/OH+BQEbTvfQA+tH3Bz6K7ehZeij224sXyumlihvnbgJCCQC5LL0Hcg0uiUGR/pxsgMQNQkzThLB1E4FPspzCbZX8qT5yeQ9dTGwNxdP52w4DIPQDEH1Maic8BcaAa3i3MyLSBDRBcfKVFEWzhOcVHps0h1MJrefyY41fYDGmse5GEF2ir7Ij3hrXY9GERWt3o3D5eAVLa6aRqwtI69mbemSv3LDk6K3zuy7Si7QPIPSvqhBuM3SemogRywDF1qCrywZ1OTqI1f0apGkfA/bTNgGO19L4rwGA2WqsQdNj9cwNFM0TJsnuAf58XUVtEGCtlhS5oT4mhhKSosYZ8kgpJjcORUkupNeNuYtzCqumFOwOfnTqm+kjpuRUAR1Oq/YUzspdtn7VYqEtyc1GyB//5udX/jtAa+FRZx/4ovzdCYuW5MzOI0DADyB2Y7oaBXWgizEChN0ClxUtIseKzAGGhWJZDvIsRzPL0XpCqd/EwTvcukmjD11Wk5B77NieYBZZcjA4Fw8m4Ndr6A7sPlr4qbI9OdYEENYxG2jJUDSEQSEMyJZFhiFMPrcAVDQxzJ4pFjkiU5pWLzwpmeqxSc62NcB3ID4M1sSjN/MTduZvBEapzRFPWDT2+hKq2XSnmEynupJvgm+1GJl3+JtfrpT9at1pXT5p7qpN86d2aEOukAvb6YSH6e3rN2jwwoczZ6svrdzlbwIE5jP8DaRdEA8u5vPCKlxbAr7/GCkBVEvgiFQUrUGkHjjcsmi6Bxf8fgVSBWbcjholEJ5JuVQF8RMO7/vst1OnaSX2wn+dGbA56eWpMwtWSLs2iLduzKe/nrtBf8ZHg51wJRZLwXHZPR9/+9r7LxbuBmQWCGIqY1+GtkY7D28Fxy4pkQYO1QaO6OYeVEwNvvZf0qeyQrgkdb7zvpRYBCDAOMZLHd3KXdC8Zm8d7IUO9vawsnH98locnAsvsyUv9ovcUqGel+tWnFffWUukmagORUuJJCtkJKEsKyKTEHimpfOFes7ZNoPRVjFhcPaCqsCZ4NzsQeMqykq/W/PSnTWrcuatpt+MXrigfMEiMX10Ses2H0z+8PqNDybta9O6ZNT7ly5Vbpm2rujWsgKx3sKJY/Pzy5cAEBhaVSXc0uVsDL0hXO7USGlnAzuXUrBzO+FpBAj6L7tBRQ1OXY2u5RF4BqRLxLXB6lBAcvuZl0hlLt5fk00LD923ZeCsvcPHnsi7dJuq9M3G3s9/p9/329B449RpqwvInA7PzbiRt/KbGfRD+nUG7UWnSuvFL+9kP9f13Zt7175YBlVVkMsi4GjxcfCA7XdAE4tnfwgTQInwhIk8kLE7m7Ko3IPd6WX3fCJMQBmUGAAlIsvW7wSEzvCRME3sCjIkROgYu8r8up5LoeRAPzrQTLIrTzG3NT94AKevxGkHOL9FWCBcET4GAUyQCsxgWOKgkxhp3ZpYK6rzlEK4UrlPeIz/Ca22BEs3AyDkwgHhmvhEGIsenDkWKaBKHIuOxC/UD44UelaWkEUo7KO5K+mCUiDwRNVvwiS214nggmf/InYls0Ey3+v6UthY6itchUUF/jZ+QSh+seCVmXkvfmWEPL+Jpbzh8ngYaftUznNjsobP2E0+e/fDsy+P7lJWXS2vm7zouYUDRmdNHvXvlw8f37WzZNSzRfSj6vIZCIyg98sXpDXgh8fg/4LaNpSbmBlis14BBbS4tmYOMS5Nk8xx/JdZ0dqTsL0F1LaKVj88wUrWZgG1WZrmDs/FKdojJFJvmd/y6sqbmWHjEjkFmeclNnCliMQk20Q+cuoJPrHbbCxoizaU9dwl086ZkI/FXHpnrz9jcddlK+1xU/dnPTunW7p91fglsp3uptpReuTt6Jjl6D3d950HUh86mXWHFr0VE1OOM364jUN33P25zrO9HxjbGFu1e+SFtfj7z/SrbT3+9dXJ11BY3fzh4IUvr7+NC7DoMM37/RZdVdbCPcHb9gZuxfpox/d+uE770uXLioYPsOAfDb/nLDYAkBpKKpggCjrWzp5rHxfIbCBzdbCIRPdfkVqrRemToZIffehmvXAyuDH/EGmxjbQ8GHwKf7iFM+h8dujSjdQjxSBAMYCYp2fuCZAEPQzxsnb2BHqEdKZpceElzXE8ieKRSAkrIRpdjc/qCmccshvZkCUjrlRXKE66ivHadz9MHDopn35FD+ODuS/RT2kppsxas6SA3pTUA6XDNzR37Z5z4DopDv66eBqa1s0aNWU0AMJkFhEuSQcYhx2MftKY67ITkrgAd4A2g3OsGzliSRNXLtGdDFZ/OtcacLo9TF0Iq6ZteuJ7qT698T2l9OgKjNr5FSY6y+puLXz/9CFt8/YGeOrLu5iNGUuOY/prNPj5jvX0x7tLv6NfrXgbiM7yIcZyNDig/T9wzJmLCaNirMbW4lG0OVnkFk2ClXltVtoTbzG+tA8bb8JN9PKBs8fK//j6gqRuo8eO9jtFj71OJNvdxRhf1eMW2gkA6kg66kiehrBG/Sk/ixZlvq3RBqcoKoZsTdHMBhdpdTmq/4TrwXzyv8ohwqpgSzKZbAlWbpDUjbRF9fppbH0LPPIPuq5ZiBhW74j1ZeOK7ur1TgQ3lAq5wfvIEJITnMnXqgMI05h2XGPakQSD/7+04+/qIa1RKLo2Sns7rlFSI9Lv7YcbPcM6rWEEmlRZ5A7H61eA7ZLTTVwpRKjWHB46xGtd6R+qRivWEPRhwk1MSCrNoOVlh/H6/lEv++lOouwfkbUV04/Pxi444usL6KI/0arJv9FPWrfHTutD3Elmfe96GPfOUOYZFMqwqyrwqoGTusmC2VqaBftFbKheXXFKfaz1SeayYEppKSkvY9s3QFKDy0g215/3WDNZr0Yb/sORsf4uH04uLZVU/pSfVUAn2M84aGXMZ8PBm+Nj4KRIA+CpvzWUfvlCxacQXXb39OWfS/PnTV6Fknr39umK8iMzlxQuhGp+JJ2fic
bMM1x411Y041kyEJ6FPmLtCn1hBEyDRbAOSmAPmPtp7YGRJUuEX7dnyB3lnvJweZKcKxfKr8vvypZ+DKtJJw99iG5SX2PkLfwq+BEZ8QV5bTeNZxS2JoHgzMqz1VbQgCGVoMk/WQFE6hfXdB+OIFrl0rINzJ6qJZa76967j5FXw9YYlMAQo8Mn1Xw5BFE/4A91URCqvizEx+SyoxvtrMcteA2v3S610ZRV1G0vZXvwH/FVFk4yydC7w8Si4KbgUY4trK0WeFLDKG5Axk0JA6mtPQbz1IgEOiq944qFnGYMqai7rIx8sl8cfHcjA7JWfB4ITKqqkCzM6q2QBO2N9baRiFglslASaxVK8aTantNDGYTDq5+JmHSTtmVKluX0lvoG/X0VWYnRb+zE6OX7A3vfPS2c3b3nhECKL9CybcXY/lTWGXxsezHdf56ggA767e8j79IbGBeE6qhQqlfLdnhKi4rXS5YonsBBmILahZMWLeCfXbMQjm0cPaeIeSFW37uro6zXhVmlpO4PGEf/+IMWY591r75aQNeT+4IsLv169NznG1bkz1svAIHRVVGSzPhzQApDZXY3DuVtat1qVFYGxGrYP45KMFv5fVZDVGXZXrKRU5NkSpX/jtdkRivmTkUxh57s3O0etyrjtvTkvndOC6dxIuf2LP2454mpv9ru8VtCy84j+8/J+b1Dr1fzuw1APKpbhxMGaVKifrwi8S8k/2B0hgpbU0JplmJIs6J1y+Aak2AMR9WkyyZ0uLGGd7KflpThp7+jZVUO9jwVHIPeguItRfQKeSr4lqRev5B3rG2wMIZ8s3rGwuUIgNCNxa1sfl7EUIO3CVvL4O6NH45UmR+ZsFarE0boqaeHb4+hHKzHP6ew1ljj8hKQbcSfvqFw7a9xu+ke0vOPG2i/Vvjt3LJta5dtWoMjTw6hFV8WUuaMPnql6OVCkt/p46I3bkw8MXX+mplj+0wfPv3VsbvOTzgye/7aGRde4FK1ARDX6HluK6M4RvplxRDyA9XE8gi6hrbYT1uKwyXbne8l20ZAWMKYKmHvtMEDmmSPZzIb3aDhBMoQa7Q6BnORwWRKAS9z36FzEKtYgrTqmu8HepPs27HllTcltTLlFL2jECSfCtcrPRt37tgoXAVAnr+LQf28o50GJl7vGBM8g9MzujZAQfdpqXqy7iPs69qZ4M2S4Oenq8Rdd7qF/OiDAPJ3uox9DG7B6EANphnOB2oUOo4N4nQfL0RxbyqHuli9YwQ4M9HHGjvH4TVxMPhZg6aY/DLWbZL0aRndtJOeczrp0Z10cykeL31TuFVpVg8IN+90E1PHjr17leFDaA8gntLj70gjBWE8tZ2w8UgcUOTx1ZILhfA6vAsiC7nVU/nyWrlY3i2zKQFkjt0iQwi7HnD1/31kPvb7lKbjxZt0HS36DC9R3w1hHmkVbBVMIe2CR0g5OcM5jWNI9zKkZmhjRBrGY0AaBhdajwdCHxmGM67QqFIadY2cJ1crxwZvkCRhBX9/TwBxmh77Hoe/Tz4ifYoI3NHwcwcpPGmRTGwyFPv9/AzCge2FR+9eExpV/iD8sWHDcnHexqV8vZX0CImW54AJUoAhVk2182YhUttZ+ORZM4nev58uxKnSV7enFJne5+9pwr41tKv51kDSIm2JPci1o4lKBqqSeptnMRZ6BHP0VVP1uzFNJZH4VTQm7HZ+hsKSCQtOo7llZfKcW52L5Dy+7iPkshCv25DXYENhVQ9oaOLGwheRuFOornBL9r2BzWdjs+3iXtqIXAw2BQSxKksoAgAB6ke8pnZCJfHznKLKUcLqNWuAa694Ca9IFARwg4q8yMV+9z5foRI6WXo7jiQRwpM9vvyVTZR+wh7zgB43K4RvxKehETSBqZqzaTO9WFbU5Opo42QgnIm19d9QYROnnnlF845HePZ4ZK1ti3ZWx50kw7GeOzKH93h5vsx9uu/edwv94MdpjXc69NM9dzI/2muiRM19a/NJxK/fnjh+SO6eCQcn7T0nemh0r/XuFfSNicndc99ZXLy3x6AJQzs9u6b33ldpnRd7K0v7di4/3GswEN33JssAdaAuDNVs9epzbDZFFQLAvFI4s0w0er1a5xiSWdCTzRjeqTG1S3SnMX1gJz8mnmNnJNusXi6dycrdtZh8s/TkOEvJ7nG46Mbulfnvdevx9oLVxHqLnl0xU4bgR4vpBRqUPjxVQluUnAKE/7C9qmB71RC6aEqjJLZ0xNFbYu3cBiIzGiYfP2SLZ60RHqfWV4dBBKu/mnG3R98AxjZ5aMhq805p0sEx/6N3J15e/e5P5p3mgqylL63LmdK337ah6EVI2vh73pUdWQuPl7r3HuMaNYCh/FEGiIN6jOHE+g04RYkhhuU0w6moIZE3opeEGJ1hveMM2//2s589neW2TsavmysRCf0DgkwrF2JAxf59Y3eXWMYe+uC73UW56rP/eiOviHhuY9o8kn4HJuZh+i3T+4GN+NPaMxx7P4b9F8awg3GcpZl1jjl7LPcKw0usbQD1zMDvq5f29v56H9cj/WodhigRH7tCd5qNOZiUAv57J9quhITQSSCmyCaX3+MhT12jFdP/N/fsN0G3+NaiwXm+8Xn08rgiG2lkzotH188pW4IF9BsafGrzwW6P9T4tHHtlVZ2lLwHCAwDkmOxg0gzR4hK4FUZI0ShSwRMjQ3Ft+TjfaEiPYyOdpWoPML3i5zzsJF7/1OA0hRSIfwD7cvv2PSWPPByV5u87+Msvhe0FY3fssxZasgZnF1T2AAIDaU/hZ8Z4XWgMOVpKqofzk8KTQzDAC9tfYmT9a+ODGjcV0hsup/b/uHsP8CiO5H24umdmV1mbFwSKC1qSESjawiByjiYbBJIJJgsRDrCQwRiTBAibIJJE8JGxEWPSioyJ4mxEOM5gnI/D2RecpW193T0rNL3Ahef7PekvPTubd7t7qqqr3nqrNtzJQjcRHlHt/DlmniIFYYp7RJjSfAG8O03jojC5SqsVq6yvz17MCdzz242Zn7bKmrV/cVHOmVPflK1bfOC5gXsXU/nyoqbLZ1d+euOfowfnrF6/LHM+SvzX0etb0Peb+D6+HED6xABgpnocZLHy82JKEFB4wevjd8LonbDacJ/tWUF6M5OaFMMiXa67PKRHnfIuoMGSB43PeX5JvMcjHS0i+d4U/KeZU7N6VzE2Bwa2DY9TznO+WhvVEBpGP5m55kjPrHtEHnANScigCDCMjr420OO5rOHxcjqKfqpNm+effRZw9WnSAw2l3xcCDmbDnHV4mMK4ffAE00tPsA6wo4aAwe/2BNWk6B1hU2ycO0VzgSUmgdogepD7rZNjktu0s6alpNKxpMrpld3IZcuagA795eMoulkGHxYgtg5yiAHouGbqgiymIqLWPxmDCeAYiz0d/FGYcgii/qDv6UchmIuGoFoQJk1zCstmeDyjUL/PyDB0+w76aQ5ZaICqkbPQaPKsdxkg2AyABhrAD82Keiyaxc6EAdgcCwAMs/nuMUuVuWUTNewJBk5Qt5p52+gdW82devROPe6lB/AEuMKvS
gMEcL0O836czDik+iRVo2ewG644doXSlVnlXzyX+tYf0GiDZ0L+i0uCyx4c6eCR02cvf7t3FlnsbYrLZ0zPG+dNxBe+3VT1tZxeo0t0VmborwZbrOKsxIkIm/ijEQZzz5k1CNZrldNfrVArw9zLOrWS05ds1qsVHRRgGEa9jGQ6qnCoBx3UkPqRPg6rVR/D+2+AqlVwfuuKjDC6dMAYctQUQQ1Hji/hsPxPCj9C5jmfvXGP/FC2a/mKnXuWL92N3VvIMvI+CS2pXI4SqwIP3f3okvrRXeYBkSw5io8tAqaoVm1/tjL8RtBBXRQqrJzFPxxUQkRf6DE7tegLMVFnkiA6Q1Gfn72Q69kTmHvl3S88m5fsHtB/32vF2PwLuZHv/UW5O3s5uUt+l4/eWuutXHOT+xkkS/rBN4+Jop/xH3YOLuQWYfX9PY7/6G6kMXjxEXfj6wtncgKoQ1d2/itP8Ws7Bg/ZvqgEx1ejxq9M/j0ey7NRy6qAsltvYEvhnzXZxUV0BqHQWZXDWKZRB/gLg/XbEbj/jHURV7CPh8CX07e8TlzUpOWRdp5D0rBdqfWlNcZNXpDT818PA8R9tONyb47VBGpYjXC6BeKjKtWvIcCGUhxeUGtJQCPrm0pjK+hRbSCSXhvUcBD8Ga88l69xTyScSx7s6PPZgWP3y155Ycy0Cci+v/+XngWXcz1KwbTx81B0j/7PDpjR97Vjp9b0nDKkS4eObQbNGfz6geE7sjInD2RxXfW3eJDSFuwwUg1zOEVEo46ehFDnUU6NRqBjoZ8ksFAC9FNldBoLs2Nm5tnw027nYQvzfMxocXl5aruYp7t1mvvyhQtKW/J7oTe7XbuQdbZ1y/CWQmQABEvout+jJsJErRXFMESMTBiWuN3oCdka6Qo/xgdoyAbD0SAmkFRApUaTrr91GHku3+rsKZ0478oFfMbb6ecSyVp5EQBBLIBUJqc/HgMSRK7OIxiQImBAlF0ZcpLMXUFmn6yUMiovMiuIoCmAcpPeDIEsVQkN8/98Ub5FyX9y6AXBEt9ktKugYN84OAbEhmK1JsndKzzkwjryWzWsIxeP/blqbbXUqvKilFz1Jzm96rbUBBA0BpDK6diCob8wKB3qU+ffoz5BMoek+NUj6I6VbeSSxNAd9MvfPyAlaPLt33//C5pMSm7jA6jA+5X3I7SWTMQu7AQEDtJDKqWjCadeEZjM/iul8wCF08KcIwhjuq8nUwDTU20M2OV2pzgZhYCO4/uqi6TXmHuuTokjxsc1Ji+Xo3CpaWU0+acUuk7uOWaK3BwQDAGQ3qEjETGgOv8HGFA6nlO1Aw/0HpKSi4qWSHU3vMoxFPIGLjG0hjrQUrXWjeAzD02guqgjhkUbWRZLqo2iDPzDOQqckuxKSUxJSWURk5myRCiL3OLEsw++c+sWPvBO/PVdu6T3yRuJ909c+tfr/6w4+lnS9A7kb+VfDH3+/vvku/ZsBAcoJ6zjE5mqiPlQHdeuJf80nGKvttLxTvONV9HGyyCPOpQxH8y9WTMdr5mO11I7XsVi5uN1plKmchods4nGFQ6aEU+yx7Et3Wi9ajx8+Hr8QRXdunX4QGU7FHTvwYDnvrqKIjpMT/zMc+OH1/9VfuLzRPb9r6I35B+kOHBCe9XMcwNQ68g4OOZUGs4DfVuC3paF+9uyYCYizAI3x8wiG7l9djipsKTIPxxf2nX+nu5Neg/Ydqyg5/LStpE9R0qBJXdS1jSYOAJvfb/ttiA8YyRgKCDr0Vi5F48fEnXxA1QwaE1QaaHkBTNtYdCc1WVlrjqLG/bufljxgvdXfqv09EUNiNYwBFMmajzEwnMqxLnYnGu90Dr+wLGxQg99BHHow8ZsNzvWYUe1nj8AYtBqLzAVJwuvzRBQkO6jKQpiuLjK887l8oOedWcMGgiy6dU5Q1++EvHV13Go/j3XLRQZ+/knzlvraqAQBMMAZBZdxcJctb7/uB+B9qNtPK6LTlBHRtM8d2E0ylVPR6NM/WwE+iGr9gmo0NS9NJrRAR4/Q+S0GWONsYwml5bipluVJOzFlAqKzga0wR+hyl97NUrEATu2Bv50+dTHp+fljF8QiDLwlHsbhxUXB76aFfBRMZIvfX/r4MS5G/NJVTEApufmvjJM/gfUgyaQoeKmzbR9qdRdAeL+ZapgMS4WUECKRbn99i+30Z0WT7XEncZ9mDSnkXG/nEZkczgSOamZc6HkPluuX9uyaEHBuKmrF6wueff8lrULi6aMLVxYlTX9/Ofnc3MvTM09P33qwgVLFq/YXP7+m0VL1s2es37pxjevnt+yagnOy7v1Ut7NvJduzpl9i2lVNIBMkyXgqMkBOOiwHUISs76/vxhulZqqEOKgEz4Ubo224sxSKxM2elQtWEcPZvpoZEc1DNfKZQXH5Bnv317D/ef/KAmPRZM+JCPQ02Q+mk/mnyWLGPKMniEj7klheLu3Rf6OueQUaj93Rz6uYOdgNbVgvbgFM0IdZsOERJWqIKkp1TXqEDDXcHVZWRk1+c6qr6TL+GfA8Dwxy3OolCZDR5ivujp1phNiVT4ptYgoLw9iH+UI4NU8DpOaoaO5OzJ8MFkYFUgBcWnh4ky6FiY1rfbByLQW/CuYkPAqIiFC0AjezJGJT0l7yPFujqlM+JJ+cq0X6ZCjcEOKHWu3nVw+5DllnbqSqr9OvdK5oOzQ5iU7V14/cibzSPsuKPjjL5Hs2V2wctvTi1H0ntx072fP9+jbI/U1VL9Z7wEF6MDJgS2XjN596elnct/DC4pmZg0d36ZFzqacsiH04Z2XP38vf9P0Fzr1bde3a/Yr++rUs47p1Llv++fMtjGdhkxm52Gs/Hf8g3IBKMgHkYyhqauWYNlOo0nTAh7PaRhFw5obY33sxbe1a2UYJSxS69fUZwRBgmG0kutvynmuac/AWtWd3oqThZnMsWOqT+Oa05PVvEZaU+mdVO7DpzbXSLeHwqVoCWeqQc1TeeI+4RAEmYLoA2FBEi9ewkLg8/CeWo9n3UpTaXa8tuyrOdVgWX/6uD8sOvs+knZDm4Xy9i2U/NXAxSiPNJMeQxPpPsaCPPKtkuKTpzdt3f/GyGEjJk0aMTzTi7YiK2qLLFtLyHfbtpJvt0w/jnqg+aj78UPk8MUL5PARPHDDtptHppTe/OPaUQOX5eXOXjZgzML95MOdO1HD/XtR3K4d5N7ecvT8pUtkZ/kFsvv6NTSEawx+Rwrna9kQJqlh8W42szDGjRfp2aocb9fqOlguB8t2nujgV2zXt1OVrt3mzcHscU7JkPSJjhj9AtUkOlJZooOtjltbK5rm0LIcTJbxhBBDz/mzFuzaP2lupz7b9i99bWME+WPTIfWn9h+Kz8bFD5r7Ys7s5MWpSSEvLihcRM5n98trVG8lykgaQfnIY6FIGi29A/FQ+jsBI5SijtUEEMxDs6RTUgwoEMGzbaiCGjaRHcfcHU4YPlXmzZMy0CwUsA1keJ5K3n26WmEQBcnQGvaoqW24yqcyN4IdrfzoEhkgfhCZVagorFdbLBjDfXjKGVbjNMZaHJXJOFMclcmU
mDhfHeHpFJR5CFJMKfTR6FqhbBSdwt9rKk2oKE1IYAWXrbEuVheFLM3GaLa1Mqgws8vJxcwbc9pd8cnueLc7SSuecT3vL27TqUBu3YZsxcXkWy6Q6MwKZNuwZ/5LyPx6mGSaXrq565Deo5fhO34yd4nJ5B4Ut38fimUy+RN5W+r3an5eu8SNrQfFmxp4zFnyfNw+tVtrAASzlVipPbfnZuDFJpLI6Zbae1NxuRJbCBgWSGfwXHpugsEBCeLys3LVkAQ1EAt8G2F1uOhxnXXWwEk2x4K1E8atXj1u/Lrq1O7dU9N69JDPjNu8afyEdescXZ5J79FnUnfAkA0g/ST/C4IhHDqzajQxog40Pa7OrTRU4HsoYQa2eQYr9RScKdbA8YK0pWgSWbOLzEOv7ELtqk5KHaRBReQFVFKEiitD17OVao834X3KcXDAADWAo8lQGyoJBC0b272wUEgV5tC0Xg2ofTyMV/LYHMyR5YuNauuoWImqLRzH4n3ePajZ5LbP9uhSvAsFbJw4oBQV4k2TUMTYTi1b93xm2pp5U8ZN7PM6IGiDC/FGpQziYaka424kjk8opWLjg7phWinVkRyYB4UgZaoZgHKPhEM0JICklVSxARtxLXk6rK6PyRxfq1E2XlOlRmqfV5eaID0VXdtSxaoqnxQ8rKpyu1DggO5dMzo/06P4zblLN3duv3bvkoU7S/p06Nxt8xB5TOsWT6UnNX4hb864tGF1GxdOyH954lPPPpuUy9m6efIHuH5NThrTnDRGmRrAcohNBWcyB1GiOWqJl1ayyP3ZT8mPaxVC7rL3b6TI3vdyOligrxoq8GN0MK4Ql3JgxOJPg5J15CdjqHZGzQ6O1mnJQo5Fov7oxRmX2pTtCszcu7ofBXS9i9/cvF6Kqbw4fXE30lS5Cwg6AEhtOeetqYqDQ8RM2iOUcwQBGunPTI0Oc1lizXjRgL+RX1DQ31AoDiC3/1z9e18209V4IpojdYNAcKiSj22IEw4G0HF/UO8eV9GaEsvVWoklvsNqLBMyqGDADNIL7QWWy26nKuEmcZ1MfqDtIavBZaDGE3GI4qDR9xWlSEMLYjURcGvuVhqKDNmwtdDYZ3DbF2KS672RnTsxOaFZk8BFjJ+Mt6MfeEVkWxUx1OiJhZE2sTAS+xdGst3GSAsj0Q/FH6BRFrwdD31m/kwATL9Dldw8TxRBv0XSsF2JuU+iiVOD6kmaF6OaJCEDL/mZucdWlxtfOrFx04nj5E+n3swe0H9kdv9+WVgeVfLu2Z3dt5w7t8Mwetr0Mb1HTZuSDXxfXS/Nlg5DPBwMBTDCQTQB2OMDAZTXlbfADReqP8Tr6bWK6kAAMsJlfBsATOLy8JqhvgDKFf4eFb6FAP7e23g9MsJFKYq/R+CA8ffkACjfKcf55xfx91yWGCRghEvQEm+qeU8sfU8sfw9g6EjmSbNpfF4H4mCwGqixIgNZ1QDLONa+nsXnYIrlSNZ/qs8pjaW7tz77FiYZjdqqJhk054ZV7/C4PoWJL+6JGmcdC8YzJo/O9+DPjp6/vXVye1+1Dt49Yd4fzo5qOHl67rBtf7ryzlsHcnu/gVpTr/epZjxj+E8A42DOwbbALJGB92TKuGo2gIbFPJH6rwaDr1ZAyNYL+5PFAL56WilWcrHtycovKFYyDq5aEe7903ufS1Olo95eNtzbe8yBz/5+AF2ORtlki1K6njQu8n6HZuOPAMFQeF/6SB4FwfA0r58PDJF8hQJBgdzrlqVAdoWCZJ+kKxWqUQ7iL9KwGitCaQg5ETIiNBR1J8dmoW6o2yxyDHWfRQ6Tw/ReX9QnjxzkB1Kah/qRAwASZRa/SSt1vgUnxEBjGKvKTZpyjWTeLjvGV4gFXOJKRpg4vuliVzxmq8cpJJECQbMB+yA13p+IzGgvafG8LoVnTIwOq2JzsiQFNirJbuSopSTvezV75apTjDd7e82LK7YsxVXNXsDJY3dSarJkf9r74bA5D/nJz216cAaN688YtPk7qo+Tu6N+XCEtyaEk2tAjr1YVtmU0Wgw7AeRMKjeh4GCSz30DrXmHyLUUfVQEwb4CX5N2y0TPlcAMEwmYsYlatMr8FqvZx51FWci5+t4s8usX5PuyMmRfuXUrrVUiH44/9/K5B+QSvdnB+3HR7LwixLKyNFM4wWCBJpRvEtu0mWhNo4TSSf9tJsjKkd8wxapl8PT1ojHacy7+HIONGokVEzUbv90Whe01VAdt62ehtuYgmFFHz7WyQxfm9zgx6OqRfofjm7ZcnDIxt/vJwQXjhtyVB1d8886W/KudkkauWtJzi9qs/qaYZiOeS85avazf0GsDRkwkH4IEvau/NcyVe9P5pUBruKhiHjkwB6B5BTs+8zieWSS9EynSDvzRMhzJXZwQxcmzjpR6E3IthHoWTpFvE8LZIBHai9P5VWk6fXH6tXS6F8YKmt8Q1YYV2iubVrB8ZoJgB1OpLioxboMujIuvjeOcnMVj11g8aRSTrg3qHJzQwwCK70nlknafr9h14ouPPpkybvzyY/88Pr00MePt8Te+9DYyvr12zZyEtiVVgV1LEv86c/kEqe/0tWYcsch2aNCIt4qK3x44MW9KP2vh4f79+wwm1V9NLz3dM3rJnHXdU7/DU/r3ypSS9xVEL1wNgOFlVlFuaAaR0JT6x8ZmT2k4fWmjCqh1PKP8ExvhdY2+6kczv6XG6RBHUZCQhULu+opcZzzD75gsUeROcnOszhf+S8m/zfxg0eJ7c6Zee+XNOS1W3O12ZuHRZ344cLLbOBxbMPz17bvm529Q7ORX8mJmiXfVK58uWv3Vgmnvrlgz6tVhLbekFrwyuupfT7fudnrX8vOfH2N2rQvsl5+Sy+itUHBCb9WoMeWNPPIwMsDXr80F6/EU4nN7Dhpq/Z+DppoHHdoNX5iFHvpe5oe35KeqIqS/ebdqzph2xEOOoXTulbVpU0V4C4yMDA2xeYmyAI5xNlk85WDJPAIolZkRZUeXyAbwYyS4dG1iXDLfeDm6K+vRXbVuvXDu4zPGZg1PgJtaMz8x3AJbNaNr8Nnc1JRheZ8VThnRbe7Yd+d+umrcoO5zR7/nyUaD23RdthuPHUz2p7Uv2EUJBN6CJmve20jOlJClrrVX16K0czn4SMzdw0dyvH3rfugBDGspl8D9GK5fiD+b8v+eQWB+hEHg5gwCT+65xxAIjFu95Qv9GQSRAAqrIrWCEybq0iiPlInYeBkwy6iYbPwW8538qJSlEu9dpXD43Vj7sJOTpUwcpA9nPa9qO0PQC0scJ5l9Aa+CFy1ixUH0iD86W/UC/ogy/laurAJWzCbDShRHPkZx3pXnAMEmxgGS0/04QHWewAEqK9MyshsB5AyekR0nit5/yXMqxbyrl4HW4hkoHnPacI2FFAn0tlrNDkhX1YsMPh+fn60kjdp0emJZ2TC04hPyLPryK/QeSZLTSSoq9/7Le5ONLw5Arsd37WFiPzIxB4xCuO+G+FlAQn2nREenr4LX+qHxtiMcrOK4e0O7wkswjSlpdGDjkZH8xgrU6LpLPQbkD/BeK8a
vN8lvgrf7xoSDDADB0F3XmSbqkd4gctC/GxM1SRW+Skbeni3Nzoga2gAmlZSUrVpVJo1pndfa68BvpuWl4c8BwXbSQ/4Hl8/nVYPN/vg6kUfdNosfY7BU1vvyamgYr8O3hPlS1ZzpyImOKSm+IjX5H/s2t04Na9h6iTeJFgS+R5nz3t1llo1hFV3kCZXraNHaenkcW5vXSQ/p73R3j4BsNZRp/39kX/HFs/h300J1tDBOTxwXuSU+9pjDqRsup5BxUlZa6Iyr7xzDuzbRUbvaL83JP9CPSvzGtyuuVv34x2OW4tBz+JeC+a9V3aKyj2Fc9TfGQN6pwgWvq6hBQ37iTKURFYLQ6Vbx39b6lYaJPgeEcX8sQbUJ7oXjSS0uQvTuNIs22IaK3eZkC7PlD8uTFY1kxDsaGQOrStVp28lyVEC2z90rdWYVy6x6uXJ57tjJk946h9+1r0Ph+1DKfmQustEi5mJvVb0weWX4/Wvk0s1v2O6UXf2tEei5i4FmkAzrVENKqi97G1/Bji2E3UkgRgikW73Pxs6lMYj7XC35VWnLBDVMbwx1THnVpr0ygl/xIEKfDCp96uGG5nDyY41b5eT+6qNMuIY+Byt7zocrl15p3e781GtfexONf1x0Ynb3pT8tfi+jzaVF98ivnq0FS7duW7Z4u/zUqHUOHLYUu7eSpTNHj51Ovpmx98KklxdOHT0qF7UggUc/+Mv7R+7cvv3msoj8dUzetwLgBQY7z3ZLPNst0kVFIRH0jhGkU2vI0XbzVlS6vdUAZ6Oko/Lbe07ZVwZ/VJnlY6ArFi6b0TBMhZhYvqNW/Lv+UIoWsSsJfkE7CFKmiElhhTUMiE1hVYxG6rKlJtH7DCZ305AsliW9PeQLclb68cePdhS0TnCUfImao9Gbyde79nwcXnXtpg0NRZ1mGhFG9dMjCkOHkMXk4IAL5PSREqR8GHf3r4Cq/0p64BN0raIgV7VFx9Ah6nIrUXrrJbr9IsGFdxYUM+BB+imynGN4BcvERAhpjFozkZrCiekP195oT8JZV3dvbJ0YFtWhXZd9+/CBba0GOOKf3SdflfZVkl1HLatDxw2X5cLZu07YVwe9+xIAZn0ClWJDGjihIfSnaSG3z5OLq/g3xbpqeKjMfWnOWg7VnwEmHHFPrtxlqcwkk+JwGvX1u2b5Vx4sk5/XIhYr/31TVuYu8ls2OnXtJC/iPX1Vi5F3ozbXRt9A7fZvMr66kLzTev/PMsLIUVPIG4FQDUu1TGZZbxedk1Wzg1ZmB0XNF9v3GGSrz06EVIhRJ5tTrD9r1TcVo8OfvKrpLHNFry3p0nbdtW7UF/2Y/MOza0XBrj0Fy3ZzB3RZwOj55KOkZXsc1AlFSZWUx/qhx3T47l3Q6igNkQYMEdBTDdHtPhY6VItQcVrfHxpGoRE+ox/AToxYEmtnI7ZRQ2vAj9RXTs/ecvAc+vFmN12N5Z+Dl66+cT3E+/IlUuWQxVJLzvlTwuVVUBeyVCOvN4InUBEFP+yRiNcewNfdzqBz1cDvaBxrsfUTA7YFGqC9DU5RwldvLZVryYAdO0bKqw6tlquO61mBr2JX10mAqg+RHmiMnA6h0EgE3gUfQ7BtSNA3NGbv+lbJTL26Usr95L2qplGrWX29/FfJYAAIgGSt5o86RjQtYIw2UkdSkVnAWbdUYbVrND+A6LVs4ska/gzvBEZDmhRrkmTYsG7thp+nyt8H7d0bgkxcHuQv8M9KNQRATG2G81A4ikb0s0FGfMUq6PIy/yvJLrmklCR0Zt1WkltZrAzcG0S+R5YgQPCKfBV/oPwFQiBeDeRWnoN24RLKVANrs5jcEaZKwNc95mHuBH+wg/y4s6hnt859lL/MWb1mduc+vbuwGgP5ezROOUdHV0fFgcxZ9KMI6GgBK3wsgME1lRMwRz6E3Ya+EAg2aKJKdp67krQeyJJvGdUMI8rkD/IA2FLD8OL0KoWPjuscds8dNjwv71geOdyhZYuOHVomtlfmD575h/0vvTQooWP7Fzp1ZquZSPqgN+BpMEFzlYJJvioVwYlTlYcw+5FwU7QpwSRlslQCjfn5Nu3rQIZeTs/t3SI5tPPzQ19clPfUsEFdI+Y0Gzdo6MantWzRHamN8iU4oQ2fCj9Dh8IDogMwnwzvH8wkPVxA+G2196h5dYpsNg7GRGGOO7TJG9742eym9Runz52T6Xo6Kym66TPKvUmLbG1CM1oaJy63pVs6PgUYRsgVUjOlmrNoWjHo4EkpK7br8CZZD6MhNkwjfdJYk8+SkiQXzrxG/rVn8oW765Rqch0lkOsckyET0Z+rD/N8bTKbb9tgkExSjNRCaispmVqnk7aBLQLbBvYNzAqUqeAGoky2y0kmXmbl1CVtKT+mxvd5eXT3Li9kdev5wuDkzi1auBom/rNzdlaXzpkjOrno3QaJyYC8I+Q7ZI1hBoTxWnYq0IAyueTQL2QamGDMMMqZdEoq0uisoeDTOncqk5w0Xzta7wzUo/OwHsa1G3v3QvKdDUpUb/eEFwe27htM5dz7NNlOrNV/gABfn1GjTsCVGgH3Pq1J+E+agLM8ynZcIK+Q4qAznLkDPd9ryx5bhQuUK9pjC2Hs2LZMXrLklmi2wQoBEKsGBAaJUVEUE8pAnz/EYgZO7EtORWETMqVj2QZr13mrl8wYexkQtJAdqIsBhM/R+3Iq8EaO+r6qBsOG8ZnSUZQtO7ouWLVqwehLgKABuY9awWEIgCjf5/yn5qwrxg+TPKPI/W7z3vjD6DHldJ7j5Jb4OJ1TPOwJYLmlPagDzy09KzvwIgPQx/eGsMf3ogxgUtSA3MSj4We+xi18NWSM6qhQa2B59Ls1qSqVmWXQjcMpDugjeizLJje7Lt3g+eOkm2359UQqtQiWYSeOk64yNJ1mnMN9FvFgUG2eUujtvCxn+LBpU0Zk5kjy4KmTMxsOnpIzBBBMgg04RjoMBparUqjpMyo1XYQZNsAaZUYhvILcQe4VOJ5MRwut6DWePVmPw7T3cbmVjMCtH1tTZGe87wfITe6sRJgQ6TDJs5I8tBIVAqJ6PEWaoMSBBIHsnfyr0tzI+eY4fGncFNYCmq1yKl6Fjys7JJqxA8CrwCpm3/iigY7P2ZhGS7E8i6LDUR8BKRrX5SBF4wQVdGxAAZuoASaYejfm5LDGvvq2I+H2aHuCXcrUUwnrspQNT+frmz+ywMnCgjaGWvpTPflFYGOxgNIZK9nJQamW8ynt3SlvLzY8pH0a0HCyR0b90e2ONdzPTvlL8o/WkD+P5i8BhbEmDam+/vEuiKfrclAH5osOmB97Uux7aQpx+lA1zls+FG6LtuFMNrEGCQzyrJPgk2ObgA1GV1AIlVc28+ax9RMoBkppRKz7vMyDoXCkp981ZhiMGu/k9T3uwIiHXVrtHI9DPjwuhV4YHscubpeSlBLbMMmNUlzK4E/o3zlylrxw5g79O4P6ocLTVdmoVfZdbPsTuUV6zpqFPx0n7V+/Zj1rpcwu9CaWvVVYrqpYs2bN+iNVD7Yw/d1FPVeJrlw0NILtqkuruncxzFqgn+oWsM
b7iqJ3ovw5z2JNXpRJJECryqMBkxpr4x5EbIK+dD2qpre7QyTmIl+1i9NX7ULp0i6NOuVM4theTSdehdASGFcy6tZ57suFtgeXrnjQnPLvbIVl5ZUvnCkoWLyQRli6opijJ7H3qlJ65ggykN/JGyuK1q/EVB93V38bwHpHx0MqMKs3WB7Ir5+hh8Z81VzghqbQAlIgHY5C7cLU15ck+jeUEiIAsZ7GZqrHAV6ftDFpSq1gMifTuwLK6+Yy15TDeTame0zmGnEitiiciWyZKYbB+ETJpij28cmMpaY+E+Xrcun7TQMjbWshuSR+4QpLH7Wy57j0pcWyi9XldKY1ZAeU5HYb5cWo/6Sz09eWJXxF/jnjwBKycMWBmeTn+wlHXp9+ZgoatGTbF6hB2iHy0o408quUsaMZ+c0zNKRxdNVXgw2RjVDHTKfTKd1C90iD9efWkyj0ObvQm+wRdK+q/Bz7IzubqBcdzjNv4fr9cnKAVQ4CKCU8LqgHo3WC+m/rRQUoUs8NVsw1sAXoY3o1nPNgSsPZrkAFjFeKupluIoaU03QavaICiMsO7JY9Y3LISQ9a6kFtcl9EHrzjLTn97GnyJuo5bzaqGkmDj4sURD8+82V8wNv73HnOThrJ+xSfBxcsVu085hV1TjRNrkAH103BigcKVhxYJMy0N5wdmVWKpvY7Ojo6IVrK1FGvmH2P5lxJhx9BvxbWAslngSxQU0dv5ARxqR+ZLx/aMWOsbfbsX8kXBpX+BaHIf01YbJs85Y8HDWgeY4vjyHdvxG2NQg1RyNyl+ciAoqO3u66eyF8KMrPWygmqPXUhClzQCI6J3QXFPsfB+kSf2qAR4ghdgjq1AeWjQQNTg5gGUqau9Ri3G/TpSPZ0pCkyJpJNvfbp2ApmaqbGolw1JlasaYjhBObIGle6PifLN+BZkwZsTdkjFvYCvjkwqai10yncBNldTiM9GGKRm64UW69EFEs7dKIdZy7SP1z34Dep374r4XP3J5LlqKPsnYzXZnj3oqH7vZW4+4ASsps1FJNaFI0o+nHh1KLEZkU/o6PJI4qGovuDmMQ0AZB+pSsXAWPFDV/c0uoKeBtilkMbcqnkZxzYVK3cEoclCNB8oI936KKzMlIz62ItudxsN49Noz1S6EEq/7at+Urz9ZafP0TffeH9Hv2Wv9nuPdkcW1v8TB4kSMWKpd/MEvWQ93wIHp+PJg4vORVQAghiqr+XI+gcomCF2BBNBBmsZkUDr2lExXqmghNl6mdVt8LntDhZUwwtoeLXv9lewdQhlM/Qwowgm6cisBOiFLPWmZIF9AbOFGGpkBR6YVXwdqOdXsypFnOKHIFXkV8O9J30I/07U0n/Tl2RpNE3yKWdFvx8jpqzgV7QUFI9XZ2+gV68H2NkQoFDfN31v6HWygnDVahTV9Rz/9o+cTsVay2DuAUAgQkSwt02O/O5HGDmtUMsK2nALNywAHWrcfUDpHhwyWpP4RbskZDxE4+UG0tWkLtHL3+ClBhvMi6PJT99cPECikST464A5hoq8SqUaJgspiLEhKmB1yizNJwiCJzB15jhUHhQNKP06wZs48/a6bMmdmpDxF63gu+jteBjalTbDa6KHDx9jf7hul8jC/ntn9TE9iEH0fObtu8uJJQVTb5D1pKlxfjO91f//AAtRfFvLJ9XjADBblwgfSMxD7yeLk/pYBAc8mM1f8MovrigiHe6GYkGww8MydHFVJpjd6it3FfGmTVR1cMg5sL4rvhgn21dJ88b3nPYO6Ctp/Qe739SF15VA7RePwFs/v9THxSepXosG4WL0v/fDiksQ1u+b9+1k1P3Refnzhr/0Ue4W1kZ7ZQy/HB5682JEyeOKKximV7ez0X6is7HAcN1QGeUWOIu7l/iMC3+rXCNgoNsYCZJqyLXhuZ6iJxTprzUYm7Pyw8eePbtQ2cOjkFNPcoo242JdGx0qH9461jr3xsBINgir0TrDK0gAELoGLVTJgTiTSe2kjwDDK36j8pZsqDXW8AYpfTwg2QHA6ToyE8O/xaSsoIeoZKWYsZdFWmknESKoD0A3ifFPJ4b7vBPotgFbrjNHsa5kGG2x1PE2Zf+99zwxzLDq3/CG+no4iFXHJb46xoaJXwu6+Z1ZD6sgq0gZfozwMFYwwDHIgPcj/qtRsazLMz/CQMcXf03DHDM/HZ8XLI/8osajn/zixr4Mb+oEWzw/0UNKkSxbkQjDrMR9504sZgsNaA528jCT8yo6YI9e8ZiA3Gg2PqAoJBanmAp7om/dyMFexfiuczeSFAit8VTDNNA4h07pold/msgsgxjH+NIYw6DyHhXtSMZuA8eiSWfKWpr1nj6GdAHRgJj8AcIqGEo9QCMeiZVXaOelG90GUVk7+FJQgdP3pu2YHTXjqOyO3cdPTCpgYsDfIZpx/7SOXtEty7DKcaX2LJBfGJydXXNr/xgA5g5UtQQQP4r589Gwtj/7hdsrsmIcjrYYYuMcnXrxmpoQeh1pviltErr+8ycvuk3baDHiJ6s6ze1dpe2b9e1/u5C/nbl41/QV7c/RRF4YxGeV9sDHG8kErL8lsl6gJPo/7fmgoD+SawHU12YANTREvJtgv8hMpESmD8Wzg52E8dM7EIAjypUbKpp8xoioER1tJ6kYj8bzcDTABTPJQ+EdlF793pQXfkGuS80jZJvFBUV6bqihkNPHSfmkU6R4UGYh3JiX0fOgzIwT0To7FTh4wrxBU/hfaOlvQ9O377NmqeSZg+ktKorUloR6lhSQk4Aqv6R9vuYqrSFSJguNEvQ7eBibw8haEM+DF8FBWXqx2EWFi6A+0yKj3jH3F/0/zV2FeBx3Ep4dN7TnYOGMzc5s8PwHEOYmZMyM1zytYFXZmbm1hSnjD6XufUXfFRmZmau69snjeRZ7WkLHyS2/N9/o9nRrDSSZpRhYA6QvIA8IHW9uUA+/bQ3G8hrr+l8IA9fnerUwQ+25OqHL2bcdVUlhci4ULW0bxaBWWwMq4eYP9lvsl9UFKcMQB/JniA0jYZkfx+6ntBNsD2AeyA30eWEbofNbILFPcAx0Lyb0An4VXAXpHFnOz90lMj4KfFfSp9oY8vYdOsTA/gPaKzeJ65Qn4AIiGt1rFy0H52aJSsoiPYabD+WPef+LNqxTkBkmmgfqnQJ3WwGxMx7A6QdG30kOy8APcCHnkHoJrgiAJ3FTXSE0AnYJNAFaegcTzvuOwJ3KkozUsnu3kz8FMNKhrU0HQCh5Qb6SKgjNF2PSXKFdj8VaJRdo5vcaQHcUa7QLwn0PpEIoRPuGk92QvcRsseU7CprOlrOP7TldLMJtt615WCuc7TKWm3xK1ijRtNBimRZNBh9JHs3AF3uQzcSugk+D0JzE11J6Hb4mE2y0BWm3LyH0AlWIrgL0tA1Qi9jtF4w0zOO1vG6p8Np/JHPTMZQdht9JHuY0HSoIZnnQ9cTugk2BXAXcAPNuwmdgB+80UroIiF7hZYdsw2jNJO1NOcQP6VESPbV0mAe2XBKoGfrkfcigEbT4f7ksEwLrbkPDEAPN9EcNJpD0+EBWGYyf0HY9
oRjYUf4sJtJigS0AEBBGnoM+6FjvNQJSbIHfaINfoS+1idGCC3W+z6xD34CPZho/FK075maJXO5iva52oNNRQ+GGUhRM/O1HjeTZuiAbjKOmrHRR7IdA9ClJpoDolGPewdgmcm8mZgTcBHpxkNXCd2M0v5LppQ6JCxHxwXIPutC1+dhJD6sJbkKINRgYI8scX2+S2K5wrpPC6zYl1dY9F3Vrs0cZQr9qEDPDm8idMLdWaAL0tB9GfkulUEQLWaFspj9HEuWPMWu8vqhvlfqpyOk871PJXpQZjD6SLZ3AHqwieaAaHw6hwZgfXJ8Qdj2Ax0LG/dhN5MUCbjGe5KErhAaGaE1glnKUO7ddC+3ktx07zaZg3Lb6CPZzoSmNVQy10RzQDT2cl+bGbVNzJuJOQGXeJITulBIXqYlxzxaKMteWpYSAJ/PIskJvVmjOSR2Ina8ByCxBYK91JyN8K9o/rIGtrIpkJtWlqHfG8bIDz9InmjN6ihizctOwzQWmSMDiLkFfmANFnN/H/MrihnR1wKzuIcLNFbqSi3FSl35UASHBGx10L4h6chXYkUe84lkmPPm7GfkxUpxik/X1co1bqPkx3oLIvoPATXgDUrxT+ib0Mhq7zjQrWerQl8bRY0vWd+LDgddspqtlyW/fk+EbsU85amlmKd8JDTAJX+Wmpz2Ant/GSp+GZqD+6JqJdAZcgr+RsLyoSKNYYZ5tHGUL315rZm46M/Tl6fposbLZl45MBKUzbzMU9A5Oq95pHp2UGJzT1/f6BTnrqvqi0V2UrNjHAVb2C4Q8+/3JOP6zY1ZxXHMzNXoWhozahVK7xDi3oW4m+CZIG5ucHNAbhztkwOYmclcRMyt7K4A5grHlLoLmRW6JEDqShYsdTN8xHa1uMv+QOrmlcxiLtfMWCMNZ9ZDNHMrm2nNkko0s9h7DA/nIaiGeYh+KuOFcK74ufMbmfIrHpdxCvGP/GntvU/H346H1na+Lf+EKcGWitbOp8Xf710a3ycu4vv7Suw7olX+s5e37uC/0bpjDVzGFkCuMRMnT0Jv+QdpRrBmT/JRdBkojljNHCkm5hZ4gs20mAf6mF9BZoU+F5jFXebjdoi7la0LWFvlOubcpAu5FXoSPntrboJVN29NLcXacSVwlOX99Gl0XzbgHOsKtDpsWaxDiFR0NeTLrtfH8xX5XvJeqjGX7g99Nefme+P9+p69jPpzNLzPOwxL0eENgdShmKO+CkbCcWCfEMFXruwErRrwLgIec46SkJ3DcvAE9DBxGXbY08OEMQ32upNjnk3vrFLIYv8N7yoeqU3rU7Wdxr43iX3Gh3PXM6+X+7+W+tGX0j7VpRPaP3Z4PXV69e4OK/u6zExvH9qgktsHrMeb4TY207KZbB48923+J0u3GBrTWIEPvcVw7eO22Z6I1pCYwR6ZFyoftxNY88caH/NoYm6B79mukOtn7ijXowKZcQwt1OhTaAwRd0eNRBN3EXG3spsCpK5xDKlxDC3U6Fqw5R7RK3ePK2sSKm4QfottTLVR3y8nlk1sOOzql1DPcihKgE9shNbrtzTKqdYMRVBwXh6ZLtCLNHoQmw6ZICYfHTHF6D4AEDouMooiFe3uJDbHioJEVJ/dZoHeN/yZWhsguhxCVp8jTKHvF+hT+G/EvcadQp7UO1MU1pI0CfTB4fuRW6ErgfvQhQb6C4GeGSkm7hZ3FZtpcUc0+jmBHhp+GbkVejmAxa3RUJjalR0T7lDcwGHDR5mCozu1lB2KT3Cxat0usbcJvjMjDsnRCoMC4kJ9tc08IN5evwpPimhZESs0EiTLhWIevQArfy3G9iXsW2yvExZ5WqROsI9ST5CdwOo0O11iTMY4sstbB6HxaO3XK7Rb675irSNytCy39rjhMPZytLbIK9AiLxSW2g9H41Ldno3tG2TtQhx5Y3S8rJqNtWKbUT0nktfnx2HccZlGF7KrfJYyGFeoJIusi4jc6jtX43fu0uPKPP3Igu1uN7arOopJLYvEv+h0QZY/FoPM0qru5CFABkTuHM4VP3fGo3KqIP65Nx4dHRWzhLujYsYwOjpVlI7ufDvK1t2/T/SI6MnRjHX3Ph19WwKWRuXkQX5iaXSfqJw8SIpvBJTmDWYfWtmjPZu1BG0clATY3thzP43lcRTxO5L9yOp9HpWi1rTGTuEaW6H3CPA2MU+fsgaj4kZ9PoN6u6DHlbn+FQu212K7kqWeZGlmeazBehMMNP0KB1rvNx/PLEnyKZogsQ7J/ZS7bzgPuNyxMSKC31BEcA18yqZBri8iqGc5tBJ/kFbtaw6m2RZt/QzSWGSOZBFzC8tn4y3mch/zK8iMaGHBzOKO+7gbiHsjWxUQx6yO/iBut5n8LvFvhE8CYgjlmT90DNafwCqGaB/1+omfErDzUOzZR+g5tI+dFRruB/C9uyR/lraPW3pcWSFRcaMdHIB2sLLHlfn0kQXb3Z+xXclST7I0QxtrsGQZpO3jACHLfzkgC9rHy8ySJIcpLNY8ROYG3csLWaNleUN1LzHrPvZyF41eTr3UqfclOtPkbiTuJrg6iJsb3ByQG2chewQwM82cWiwrNSKzij22AkiO1GxZFUBxYPte7i8S3+MSXun7SNTrPj0u4Wk8BkjeDHey8Zbkw/9A8ua1LF1yiu6OFZJcjU++UX/jwfiNmT2uzP0v2ndV7bAZ28eKnhIee3QJgMSnFoeuNfDHwtfYjvua+DwbteTtAZ6kv5IcKw58wY8F+lZ2Zfg8isyXU6y9HZ5kE6w4fr5jRrm+oIhY+56O9daLMTOK/xUxr4EuikARc0euHOfE/CAxr9mb/A1lz8uRWJJ5ADG3wNdeBIp2d/N9zK8gs0KfD8zijvm4LyXuNraQTbf2HvI5RdoUP9+D+NvgY+hrRf5ijvY39B119B0b2Szc37D2TjqKvO9w+oVd+o6N8A76NCtuiZfL8H5h6nis21kKK8E7GbZD0LqLMjYVysQsnU6uPHnjX4F15KbV7s3mPG1BZRX3PO/063uXUEvzzSqfZVe8N3HdvmrZtN9KZt1BFdGzj5wJdK7wT9ItxcUv8az05eMf3PrTacfFBn9WDta4yfHfwy5L61Da1dTsjOe8NeFNxv1UWgJenDjIV7bCdVVlURyjE/WscjOrT5/z074X1qBA77KHRleSz6XcNMmBTKFxzwu5Jys0XBa058WN+DEHih83VREzxY9jJjPvJuYEdJF9evOlLIfsU1XjxDfoFP22OJtkodUSzbCwbgO+W/bW6LKAmH0/fLdobv4LcbeyIwK4sx2Tuwu5FTozgDubGdyReuJuhptZg8U9kBvcHJAbvf90ZjHrp6NyAeKe96mqj6HtdpSI9kcx8xiO77M0+jhAbtPkk9O0RjBLXuQkgT5d6+9Tdoov6ie5R2huzOyE2j5XoxusnR16k2uLHUcWOys0IsBiY1HDYpF7D4Vm5wfMhQbY3LqXjwTMs/Jsbo0uDhoNJjfvJu4EzvEL0uQu9vaMNf9m4k/gfmSBT3YcEx2D/mCXeRb8GrCO6IPyW/s7An0B2GMuO9NbUU41VpTN
7nz3VXtnyovk8hUoyVitm2tZvbUWztaSYDU1lGS5Rt9pr2goar5DapXcg6FzLDewkwF3clKr5K4G7Q7fAFsBtZJqdx5B/GRsv8l5BAD7H5Z1YrD/2B7ewT2AtPgwafFG5wE2x9JipqlFfgayKPQCyLK0mOXzieXE3Q4XsQmWT+znmE/oC/KJ7WWOD0saV5VCnTu4tI9yOBk6YkYO6T+vATQwJk/1yX9yM2I62U6W7xScw/tjGcj+HP+MlxW474Bf/7Qq7xW95UPrsL4XlmOozatlXnUv545HVSVRWVQ09SuLPPTo76t7i4o6z3WPwnKiA2RxUcbFObnfb9GVRdXc+r/YV4z8Qw1sZxtCc1kEZkKreyBEoXP0YB3BzwFwRuOzH4bPeLt7eupktKGlPhvawE7QNrTUZ0MbYBO235razZmD+KEaPwH6yEiowH+P+Pm6nQP8H+dLiG0AeAFVyIlBAzEUA1EjafSd9F8ApbIGcr3Zw/Ja6+t6vm/3rCXJZSo7SApPEpDdC7SinPG3dkFRYg6DhDaArzJJLFdQ1LOZGNtEcjIz2RQ2QAUqt626tEoiK/ZSR5J9xMzc9zDQItDftdSC+w9Alz7xTheekvJReeozPUxQQQjjcqJ/+cSLT+XVHgI57X3miegMwgkKrPUDInsISgAAAAEAAAACAADiktOWXw889QAbCAAAAAAAxPARLgAAAADQ206a+hv91QkwCHMAAAAJAAIAAAAAAAB4AWNgZGBgz/nHw8DA6flL+p8XpwFQBAUwzgEAcBwFBXgBjZQDsCXJEoa/qsrq897atu2xbdu2bXum79iztm3btm3bu72ZEbcjTow74o+vXZWZf2ZI6U3p4f4Ck9+V8/0S5ss3jJOpDI1vM0D+oI/rQz9/N3P84xwTRnKQLKCpW87BvgxH+wNZGhqzh74/SnWlqouqq6qMar1qtqqJariqt/ueue4GjpfdqS+9WSunMDc8RqPCqQyM5fXff3FFLMO4WI0rJFUN1utRTIw3c4U/mdtkIGWi6P2mXJH8rc9uVk1nbNwJ4xDd++VyH83lUU6Pp5HGfTmosD9VolBBnmVXeZK2/lCWh/ocp/x/aE/1cDbiJ+jzjvr9FFI5jc4yi25ShS7+MSrrve7Sn9T9QIn7IrtPdlH+wNmFwCIZqO8vpZPYdynd/C3Kw5Tn8H8ZwPzwPocngRPDbxwfnmAfZXt9p7r7ieuUe8YRzNLzRdJdc30pneLNytc51H3FCvmcjrq/vkkDOoUVrAgP0FeGMi1pqPevZLz/h5lSlx7+O2qqqvqZTJL5rA9fUMvvwwqt6Wi9PzFcpLqfvlrPNkkZmicVGKZ7qV2YmP0otelg+ZM7uVQeZFHyAE3leqbKMurpvzrJ2ayK6znY/ckGGcV6acYR/niOiIu4UJ8vK1xA/0Jteri/OT/O03zdkX0cp9JHlmssS0nlJ+b7kN0cHuaKUEIaBjLD8uivYYI/gTPCo0zyf9PVd2Qq/NPVffdP+VidC5NqLHXr6K46za3hKP8y/f1bVPYP6PmNLPR9GazqoLFV0hjLWu6SNhyaLOWy/43l8kIvKiQnkspUusU3OVSO4AQZzWGxPl1iM71ezuU+aJ2H6vkiKrt/OM9ylefS/hlWs0RrdK71hnk9dlGpZC6Yv/w52c/m2S1KfWweLpY/OXtffXy98gvVq7l/N5Z5t1jmXfPnFmWeVb8Wy/2ZPap1W618TnV37tWNZT4tlvnUZDHYvzemxWXrbZHau3F/ulm8to9t0frbemyL1BxZ/2m+btM4zlHeqjxb+bXyRc3nfu6H7C/llckabgtvUmJzwnxns8L6VZpygfpuhfIKZTujn8fZYnyGs20Ny8/GlIHZ3VYPy9PGtFlj/V7KVqXsZfPHZsA2aR6yOVHMR/i/1dvqsL20+WYzxjxidcvnnM2ajWk9bz1uMVh/599uzPxflkObszbr8vrnzzbhBRqTaTB75O/mNf4PGySVPAB4ATzBAxBbWQAAwNi2bfw4ebyr7UFt27ZtY1Dbtm3btu1Rd1ksVsN/J7O2sAF7GQdxTnIecBVcwG3NncBdzT3IfcT9ySvH68E7zCf8/vzbgv8ErQW3haWEtYUdhOOFm4QXRRnRJbFe3EV8RCKXVJQMljyXxqVlpL2lZ6QfZMVk/WTn5Q75YPltRTlFF8UmxSMlVk5Q7lF+UdlUGVUNVX/VLNU2dVo9QX1fU1SzRPNN20W7VftWR3VTdKv1Fn1T/XqD0dDDsNHoNHY0bjE+MeVNfU37TN/M2FzNPMl81SKztLBcs1LrHOt2WwPbeHvOPt++2n7CMcQxy3HJaXa2dD5w8VwVXT1dM1zn3Xx3ZXdtd1f3ePdSj8TT1rPcG/D28j7zLfEb/S38VwMgMC2wNsgOlg+OCF4NZUObw1XDg8KPI5UiW6KmaOvogei7mCtWItY+Ni52OPY9/n+8U3xN/H78NyNmtEyBqc30ZUYyU5mTzJuELBFOkESVxJVk1xQvpUqdSWfSqzMVMquyweyA7LMcPxfKTcjdy/3IB/Pd8g8LwQItzPt7GVCBbuAiNMLecBJcCvfAy/ANEiM9ciOAKqNmqD+ahlaiA+gm+oCl2IMhroJb4gF4Ol6FD+Nb+COREQ8BpCppRbqRQWQmWUMOkdvkI5VSD8W0Kv1TEDzACAEFAADNNWTbtvltZHPItm3btm3btn22hjPeGwbmgs3gJHgEfoIEmA9Whq1gJzgUzoab4ElUAB1CN9EHFI4ycQlcH3PcB4/HB/B1/BaH4HRSjNQlG2lJ2oBy2peOp8voXnqFvqbfaRzLy0qzRkyxAWwyW8UOsjPsOnvHfrEwlslL8Cq8ARe8Hx/GJ/Hl/A5/wb/waJFLFBLlRFNhRG8xTiwRu8Ul8VqEiHRZTFaS9SSTveU4uVTukZfkPflKfpNBMlUVVuVVbdVcEdVLDVIz1Xp1TN1Rn1WUzq0r6Ja6kz5tipo6hpheZoxZavaYy+aVCTQptpCtaaHtbkfZhXaHPW+f2f82xRV2tRxyPdxoN90tduvdbnfJvXQBLsmP8Qv9Wr/TH/UX/d0sCRMZsgAAAAABAAABnACPABYAVAAFAAEAAAAAAA4AAAIAAhQABgABeAFdjjN7AwAYhN/a3evuZTAlW2x7im3+/VyM5zPvgCtynHFyfsMJ97DOT3lUtcrP9vrne/kF3zyv80teca3zRxIUidGT7zGWxahQY0KbAkNSVORHNDTp8omRX/4lBok8VtRbZuaDLz9Hf+qMJX0s/ElmS/nVpC8raVpR1WNITdM2DfUqdBlRkf0RwIsdJyHi8j8rFnNKFSE1AAAAeAFjYGYAg/9ZDCkMWAAAKh8B0QB4AdvAo72BQZthEyMfkzbjJn5GILmd38pAVVqAgUObYTujh7WeogiQuZ0pwsNCA8xiDnI2URUDsVjifG20JUEsVjMdJUl+EIutMNbNSBrEYp9YHmOlDGJx1KUHWEqBWJwhrmZq4iAWV1mCt5ksiMXdnOIHUcdzc1NXsg2IxSsiyMvJBmLx2RipywiCHLNJgIsd6FgF19pMCZd
NBkKMxZs2iACJABHGkk0NIKJAhLF0E78MUCxfhrEUAOkaMm8AAAA=) format('woff'); +} + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: bold; + src: + local('Roboto Medium'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEbcABAAAAAAfQwAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHUE9TAAABbAAABOQAAAv2MtQEeUdTVUIAAAZQAAAAQQAAAFCyIrRQT1MvMgAABpQAAABXAAAAYLorAUBjbWFwAAAG7AAAAI8AAADEj/6wZGN2dCAAAAd8AAAAMAAAADAX3wLxZnBnbQAAB6wAAAE/AAABvC/mTqtnYXNwAAAI7AAAAAwAAAAMAAgAE2dseWYAAAj4AAA2eQAAYlxNsqlBaGVhZAAAP3QAAAA0AAAANve2KKdoaGVhAAA/qAAAAB8AAAAkDRcHFmhtdHgAAD/IAAACPAAAA3CPSUvWbG9jYQAAQgQAAAG6AAABusPVqwRtYXhwAABDwAAAACAAAAAgAwkC3m5hbWUAAEPgAAAAtAAAAU4XNjG1cG9zdAAARJQAAAF3AAACF7VLITZwcmVwAABGDAAAAM8AAAEuQJ9pDngBpJUDrCVbE0ZX9znX1ti2bdu2bU/w89nm1di2bdu2jXjqfWO7V1ajUru2Otk4QCD5qIRbqUqtRoT2aj+oDynwApjhwNN34fbsPKAPobrrDjggvbggAz21cOiHFyjoKeIpwkH3sHvRve4pxWVnojPdve7MdZY7e53zrq+bzL3r5nDzuTXcfm6iJ587Wa5U/lMuekp5hHv9Ge568okijyiFQ0F8CCSITGQhK9nITh7yUkDxQhSmKMUpQSlKU4bq1KExzWlBK9rwCZ/yGZ/zBV/yNd/wLd/xM7/yG7/zB3+SyFKWs4GNbGYLh/BSnBhKkI5SJCVR5iXs3j4iZGqZyX6nKNFUsq1UsSNUldVkDdnADtNIz8Z2mmZ2geZ2llbyE7X5VH4mP5dfyC/lCNUYKUfJ0XKMHCvHq8YEOVFOkpPlLNWeLefIuXKeXKg+FsnFcolcqr6Wy1XK36SxbpUOLWzxg/tsXJoSxlcWgw9FlVPcTlLCLlHKtpAovYruU/SyIptJlH6ay0K13Upva8e/rYNal2OcjWGB/Y2XYGIoR6SyjtOOaBQhXJEQRS4qEvag51P4ktuuUEzGyjgZLxNkAD4kI1AGk1Ets6lVSjaQjI1ys9wig6iicVaV1WQN2UiOlxPkRDlJTparpIfqRNGUGFpIH8IsgQiZWm6SW6VGpMxiMlbGyXiZID1ksBk0tasa+REcgrWbjua9k1ACbC+aMyG2RGONorqd1Ey3KvsMmr9WKUGrtEHZP2iV5miVZrPN5uFQXa21FgShu/bK9V7HCz4/+M4nBcnA9ltfW25z7ZKNs3G89bp3io+47JSdtbHvkX+Ct+dcfK7+Bdtpf+h+/o1trsvLQPQzsat2+pW5F3jvS5U0lhdi522PtbA9L6zn5efGkM/y3LsGAHbD/g22Tyv213N1GtoduwmSRzWG2go7BIS/cix/ameH20SbZFOJQFgyAFto4y3STgLhds2m2LIn+dtsB9i2JxWyA9hJ9fuNXeLF+uvtiB0DCWES6wxgl+WMN6zPWQDCnu6j/sUmGs+LuV1spo2wdRZrE4gkiiiLfNTvJRtgJ9RHpMZ/WqP4FIBQVAv5Qp3L2hFe3GM7/qa/5BWxg2/Iv/NsW7UG7Bzvdb0p326+Inb0PesfeLf56q+7BkDEK/LaAQBJXldHI9X96Q6+dVSX3m8mGhvy7ZdDbXSCE0YEqcn86BTP/eQUL0oxdIZTEp3iVKIyVahGTepRnwY0RCc6LWlF61ee4rHEEU8CiYxgJKMYzRjGMp4JTGQSk5nJLGYzh7nMYynLHp34m9CZz1YO4ZKfMOEQIRxSC4fMwiWL8JBVeMkmfMgtfMkj/Mgr/CkgvBQUARQVgRQTvhQXQZQQwZQUIZQSoZQWYVQS4VQWEVQRkVQTUdQU0WjmujcQMTQUETQWSWguktJSJKOVSEprkZyvhYdv+A4ffhZefuVP3WPRaUeiCGUEYwlnvIhkApOJYqaIZhbziGGpSMoyEcFykZRNwmGrcDgkfHDkP4WQhQ3EQBDE9pmZ+m/pK4ovGh2DLW8Y/0wRrZ3sTlWy/Ut6kPnlj7St3vzVJ3/zxZ878t9iVrSeNZdng1ty+3Z0tRvzw/zamDuNWXr9V2Q8vEZPedSbe/UNmH3D1uu4Sr5k7uHPvuMCT5oZE7a0fYJ4AWNgZGBg4GKQY9BhYHRx8wlh4GBgYQCC///BMow5memJQDEGCA8oxwKmOYBYCESDxa4xMDH4MDACoScANIcG1QAAAHgBY2BmWcj4hYGVgYF1FqsxAwOjPIRmvsiQxsTAwADEUPCAgel9AINCNJCpAOK75+enAyne/385kv5eZWDgSGLSVmBgnO/PyMDAYsW6gUEBCJkA3C8QGAB4AWNgYGACYmYgFgGSjGCahWEDkNZgUACyOBh4GeoYTjCcZPjPaMgYzHSM6RbTHQURBSkFOQUlBSsFF4UShTVKQv//A3XwAnUsAKo8BVQZBFUprCChIANUaYlQ+f/r/8f/DzEI/T/4f8L/gr///r7+++rBlgcbH2x4sPbB9Ad9D+IfaNw7DHQLkQAAN6c0ewAAKgDDAJIAmACHAGgAjACqAAAAFf5gABUEOgAVBbAAFQSNABADIQALBhgAFQAAAAB4AV2OBc4bMRCF7f4UlCoohmyFE1sRQ0WB3ZTbcDxlJlEPUOaGzvJWuBHmODlEaaFsGJ5PD0ydR7RnHM5X5PLv7/Eu40R3bt7Q4EoI+7EFfkvjkAKvSY0dJbrYKXYHJk9iJmZn781EVzy6fQ+7xcB7jfszagiwoXns2ZGRaFLqd3if6JTGro/ZDTAz8gBPAkDgg1Ljq8aeOi+wU+qZvsErK4WmRSkphY1Nz2BjpSSRxv5vjZ5//vh4qPZAYb+mEQkJQ4NmCoxmszDLS7yazVKzPP3ON//mLmf/F5p/F7BTtF3+qhd0XuVlyi/kZV56CsnSiKrzQ2N7EiVpxBSO2hpxhWOeSyinzD+J2dCsm2yX3XUj7NPIrNnRne1TSiHvwcUn9zD7XSMPkVRofnIFu2KcY8xKrdmxna1F+gexEIitAAABAAIACAAC//8AD3gBfFcFfBu5sx5pyWkuyW5iO0md15yzzboUqilQZmZmTCllZpcZjvnKTGs3x8x851duj5mZIcob2fGL3T/499uJZyWP5ht9+kYBCncDkB2SCQIoUAImdB5m0iJHkKa2GR5xRHRECzqy2aD5sCuOd4aHiEy19DKTFBWXEF1za7rXTXb8jB/ytfDCX/2+AsC4HcRUOkRuCCIkQUE0roChBGtdXAs6Fu4IqkljoU0ljDEVDBo1WZVzLpE2
aCTlT3oD+xYNj90KQLwTc3ZALmyMxk7BcCmYcz0AzDmUnBLJNLmoum1y32Q6OqTQZP5CKQqKAl/UecXxy3CThM1kNWipf4OumRo2U1RTDZupqpkeNi2qmRs2bWFTUc2csGkPm0Q1s8MmVU0HT1oX9Azd64w8bsHNH5seedBm6PTEh72O9PqcSOU/E63PkT4f9DnaJ/xd+bt/9zqy+MPyD8ndrJLcfT8p20P2snH82cNeup9V0lJSBvghMLm2QDTke6AFTIsiTkKQSTHEeejkccTZeUkcYLYaFEg9nCTVvCHMrcptMCNuKI/j4tbFbbBZ/RCC8hguw/B6fH6v22a323SPoefJNqs9Ex2rrNh0r2H4/W6r3d3SJ7hnrz1//tVTe08889OcCZWVM7adf/Pcg3vOfi7Sb7ZNnb2MrBg8p7Dba2cOX7Jee6fhjy+tvHnmqCFVJb1ePn3qzYznns1497K0c1kVAEgwqfZraYv0AqSAA5qCHypgEZilRWZ5UT2PYsgNdAxLlEcNYjwKajQGgw8Es+JcAwHH5qETLIgby1WDHhpXgAyPz93SbkOsep7hjeL0eqNVIP9lTHKRzEmHdu0+dGjn7sPHunfq0LV7h47daMbhnXWvenbo0ql7x47dmLCSvrRSvDNw6uSa3oETJwLthg9r37v9iBHt/3lj9amTgT5rTpwMtBsxtGOfdiNGtPujmzivGwjQpvZr8WesjxPZUAYhMK1F/0qJXHRyLXWOAx0H50dxboQfxapphKtHGVUGHf1gc6PC6GkIo0NCsYGDIdUo5n9yHFb8Uz0qpyqHT8qpyOmZI4w2c1RTC1d7tc4anqdBGhkdmshNVo7GA2MF8+opFMrXcvAt55yfJNbVj8SKVhCJpBCfz+vGL5mK0yVjQRtLLX1+osicbALyzY/jkdK22by5e7c3z+x5acqYSaSkScEL3Xs8T9l3/Qc8NvUqY+SjNsv87OFG3YpXpZYUzytzDe7coy/ZsiQ4Yuzd/U688NSmCXd17sZub3v7oC2fjfhCGltW8VnjxjpZZy+dWjwpIJwormzTK79/iW/wBAAgqGEiyZKzQISGiQpWr1h4SISYUkm57FNqBQIBVkr3y8NAQ+3D36A4IWQV/JmZqJw2NT1T0Q3QAqTsQblg41NPbiqQH2Iv035kK206mGysZG3YMSs7xtrMDAyhTcjWSC4axqy4LiZRQdFdvnTNq1KX320HjVawZx6SCzc8/UKgUH6QtKPt2PKac4MDleRlMsxKBpFXpq4ZVBNmKyIxHbSvMAF1NBWyAQPW6z3nEIpfMhe2fL8kuIX8TClDEQQX6cwueUmTlNNpRPey/31uR/D0LuH14ccWkqFs//wTw9hv00gu+7IyEr8T3Cw2Ex+EZHAAktOEiPrIJO5s8hWcNqema06vU3PT02QFW/8NW0tWfSM432N9SfA9chuP5WOfkxnwHUgggyki+HwUXGw8M+65u8v3uexl0v7FyJpdaRIdRN8AAdJ5nYKQIGi4CB1U8zNNoUnPR3X1LjTb4EsQYnsMWACwJO6xk7e4bT/99GX0N7R2ndAo0jMzAOfHN02cnKkT94fv09bvr5QLAD8UpuJ51ev0rCK6SgOc3gCn19OKL9lADWokUbkS0ldBzwNNU8HdEjRXVGu0qPKIei288y5jBN59h9Cfl8yfv3jp/PmLaAn7hF0izUgO6U0cpAW7wD7NP3vy5Fk2o/rUyQeieM4C0DcRjwS+aHYSJiRhdokFkVRTjNUkvr1gffj25dM3f2ZXqEN85awnGncAgOhB3A1hQDSuhqG06+MGs+MEg0I21x4BImqiqcGk+kF0sY1xoc8M45pOL4mpgk13GVCnJSTTKXr+KSPXFgybNz6w4msqEctn537ZcSt7XKC7j1Bp9YE+E9bvXiU/S5K+eGzlJwfYcRkI9MM9smOuzWDV/+9pGmaYlnq9hLYFMjf0Fje13Izl5ntACdyDxkxTg0pcymnYlcImJDTWkK0ZcHQO3nrRBvWETcbdrEfVuA6VHa2IuhjrtnyGTjYeWzR1zsyJK7+iMpFevcjmTVuxkH176VX2rUy/Wls1d+3ilceELgtnTJs/d5R85OMrL40+Xdyiev7Ln15+Uh6/ZNmc5Qsj/CwFEIfj/jeANOgFJknoJonXwOrVZBeho02iBmkcTDlsEq4XIUsyjQo+3p84FpvOj7aLuIlTcynCvocf/qlml0xn/1WziWySrVR5nj1BOt4mXPlnKO1Lm0d5sxb3wsB8cmFylDcEVyexVFLRSeV8JAmXnJAllfClLUX8xpYRRhu0x6VoUYM5CS4WP7Qol4xGbc5ACRJ8Pr8v3WalWOW2FIsc2wbl3kECqXmlRfO5Xd/44pfPn2a/S/TjFRPnLl42d9J4O90m5J9jt9zYlFL2x6eX2A/nn5Us0xftWbf+UPvWQGEBYukSOQMu6B+nMDE0VnSsHA0kECeUCrz7ItigIy5ra0J7xQK3tGcqRoQsNh92U8w/JhEZmLktBoMe7bO7rLB0epebg632jH3uY/bP+ffYx6T9mVGBvNsWTF8WkF5wOh7Pcnz4lOJvxb4//z77iJSSLGJH3RhW06N96dRHXn5ww7qD0f3pDCC6cX9ugKIoomQEkXw9VczkxNMLnBCUCoruT0/3oxKL7r/NJmk/p7m+evWfGuE78Vt2lRns9N13kx40+4fnAD8CjMf6NcP6ZYKOq42NrmfDJWy4Xj1P+cEsSLLxkhUklCwkOAq4oqQVOOpuIs64nGxq0JVQz7ij5o27pAixmy+WM/67KC2ZsngH++XyNfbLtqVTF/36ykt/vrFletWG9bNnbDTmjRwzc/aYUbPF4lnHCwofXvLa5cuvLXm4qMWx2c+eP//PkRkbN1TNWrWa/j1u+eJJExcvjpzFAYg3s44vfRL+t0nkS3xjCynWFA5OSSRLynVkyecXVH67ol5PpINovJ8YLr/dnoHXLW8MFxXW7i3ZMSj8I0l96SOSyi5/3XNvxxtbB5aMDNy4dsmE9UtPPfNIx46difLpNfI/7DL7kp1g37C3GjV6NCeL/NStbO2ps2c2bD4CALW10f4qDgYDNPymcCtU8R4uYw/H8WnY1+/HcReOEKGKyJDmBj5OcRwItIUhwnqhFpJw9xFg6CkFlTYXTfVqZdf/tfIcAE0d79/dG2EECYYQQBQCAgoialiVLVpbFypuAUXFWRzUvVBcrQv3nv11zxCpv9pqh6DW0Up3ta4uW6uWCra1So7/3b3wfBfR//rVcsl7+ZL73nffffs7HTFBR5D3WpvCDmUdIQb1I01myQTjoQl2MRpRl/r3hG4oVpCF83Vw+kdwei2j93o4WagRrjD/Nw7YgU6IrsgAfQGRcYCTLxUZur5kPuL/lYuuNgU1XoSa+ueEfPon+J1yrD1J7UCC+5VG3BHBHVHcEcUdlSGKO3nPyzABMdyNFOv48MTEyEXCyPp9KK85NAqGGrz6I7y65gckiwz3dgAI+xivtAIDOA3LqyxbS9V3By2ZYgWxj1KxdrMPUEhIZKJWxzrtdWqXG6lJNABmTO6
TO6EgZ/pvgvDn0c+vb5z6WEvxzh24q2xeXq9VAwomDR8q2098/X7JuWGdhg3GY64xvHvgZPkLaR2wgixCI1vHWKJpbdGx3G7mDCO77O7d6Eeg+9T6IJEoXP9qW0dDeSvNbVsrcjvaUN5aC9pa0c2ZWrhMKvyhjOgmkGUyEsFkpRLVKsh0dyc2B5YQICBgIe/NBCIEGNktqHxMBISRCV+50v3qzz2L/GNX5i4ra+5/7cXJK/oKktUtLnpWmZsBf4zfwZ/i9d7NYU+YMLgiIyLr7Gi8AA/zaQ6/hPNgCdx2D3ukdEseEwlhjDkuaOZ8eO9b/PGA3n2za6oggAlxCaLjSGGvi6/CKXAHfhxvwhtxbhtLaVQsrIM2+DLywL6O+mUrO6a7GfRIcPf8hNHZAIBE7VQd8ASDAWfec3ESdiGTC5nSGsiiwiLUtMnjuEOk1kzFcI9JHoR5kz0Y+SwCsXdhGH0VKhzHp/+FzFeRz9+O7fCtL2Q4AL8u2e72RcFosiLP9wIgHmY+hxmEgGJg84/lVDxnGtpH+FMziw5T/GGx/Sx9V+NPbS1/uvSGcm/t5vGnTEK3rUG9y6yEYO1+tfpYOon3TSpILhmHhztfw/bCn2qhobiwdDW+fQN/CjstfKZ4Dj4A9dOWrFx2S7KdOD56V0TLD0s++Qptwe2eLpq+6O1Jo56aACCYSGT3GbIfW4Kuj9KLgIabbN50LDdy1C0P5CSL2U+190OAThfGG/zHkIjP1Tfgj2ByPUSwrYiu7925+a0D27bugj/KF/F1OBh6QhP0gEPxrZ/ljc/fsONrFTee28R4g67DL2Qd3IERJIOHLwGln4cGSUJdTxdyhgDi1AKL4NMYAdkLvyXzDscv4Os/X3r77Nm3JRt+Ef9xEdfgl8Wb97668d7lQzcAZDjMIDh4glxAaHWfDV1JZj/rSS1tOuz1hHmUcIAjHG+MklgeL6F9LCbnn+jtWIJ+rI8SzjpaowWoDFuPSrZKXAiAE5+ZjCY9wHwiifwfvmXsI9wJMhnuBBn3B5CRXWYPc85tcJTWCd84gtBCVOTYSOfNYvNOJnxzgfBNCMgDJG7zSAeR2NXUTWzOuYmcC5VObFq7NxloMKYVZwDIYliIk59EGoTQ8FMi1WHihc7472r8D34dZmIIYUsBXXXbuXHroZP7iteG4MvI91jOCtgbusEO5K+347Q8e+MPb+JPbT/Gt4ZtDjppKBnYmi4D3IJyT8WxGL/UbqKsmPH2vW7kQdLd4LSKMre9bogIAvLe7u0GiyvOul0mNypGuE2h989SwFg6lJAPH3RNyQJYyWiVDLWO6XV1aHWtQn/HIrSI4vwGGfYxf74lFwHn0WS/ZYX76uoIKFu35IbrwlVyYQCxLpa96kTTx3OvJq5zuRfv5Pnw7hyqq8P1Z75rABK6Pm/yyAWS7d6fZ34//7k8f/ry4ka6xjKbeygnyTXR9CbFOhNBTIUiJtZlQleZiHWo4RgPKCvqPoxRivhqEFpQ55fr6lbBkzDE8TtKxt+gmY6VhGRb0QTHkw6dul8oThJo+wjtwodgwulWsMINaHf91LqjZPMpvyPTOJQPmKOhI8f8PFG13EQvVGfduUdgdUUc7AqJkgqDxNrKgaMhs+eobTNFT+700efrUV5FO30KebG5Uc8EWtlONUbCMKgzknfwPPyXDJ+HyXX+Mu77L9xf9q8jy7JPHHm3L/wDzYL3tomF0LEaU3YHPO9P/D/xPpFcNlR9sDfKQ0VIyDvYAkWjZCRQzAmOFb5urd0QeRq30fSlk1sX8kKZEurossFEhcHnyoTDl8u1YiS69x3B9zwSWwMExpGYerP/TAzKwmQIe+FjUFIzXI7/xHfxIdgdStAT9q2tfHHfu+/uf+kjNJB8sB+OIDdl6AFH4n34L3Twt98O4jvvXP/tEFB10nkWhzCCLoBffFVBMRMFCoqJUu7Jo9qcQ5WQhel6UVXuFrihDj12C/rgmlv4Xfj4imeeWYHfRW0c30q2f05/8nfluilTqH6k9PKT+hJ6GYEFpCu4GMj0BlevUyth7YJ7K4qXwVBu5hBhkW1IDMiHUy53QO1z+HbC7IyHkG/FrwOur4fAz/Q/oGEDoWEgCAODHkFDdtGcXDTnCMq5zh4tAL0r8H4kpavGhqLpIBNRJVTz83QOvA09Zkyd91RIxN025kVT8WEYuGH50hX4HMp1PC/ZLpyZ9q+OkeWL52TMDTFb1nadMXVp5dSnJy9Q9tJwohNfko6pURM+HNWSXLSkiJtbsnyG2TXfxfFwS0N5+AN5LeLfk+CaalbRx3ANsgkVK167jf+BYVf/gGESurZtzbKynQeu38YXb/6EX5bQb+9sXLEFzhw+vX3GF6/ZfsL4bXnqqum5OZM7pl96/eA3tz6Xly0pAhAEAyCWMjs8lpcL/M4jdosEtVlJxXhgirkUP1GHnxBHE/PJKN6sVGi0nNDoFpObCZzc5HQCL2Jc1JAPCxfF+1idfOgj3sJVDXfxqbrX12+xS7b6DrXYAcVbQnV9h+07dmwXqum83gBIErOT0h6ti1Svgj5NhjuVyQPgGCjm2X0hcx7M1kRooc4DKgqUA2AuFBx3fnH8AwW4oHC0GH+3L9MPbQCQf2TPuZTjaH4+bo9y+oEPGxL9IFfbfYkSzHAPk61ylpwjE4wKyA1qmgtMS6QQLWHPpkMRHYZTpdFCH61HFGtTIrRCc6KRuj30nxUBCMOOwggIr9bgFy/iizK+cAm/VAOXIklse+9LnYfY9m5f0XTvOnueTgCIvzM9MZCzvDVYu64bu9CRCx3brjqoeDokgUJH8jwTKfoEd3emyyzq/2glwTUEZ8DP8AVcRf5dgafIVSthCwp0tHeEojDHRXQJfU7X1YvgdY3g5QZ6cnhpZn/AMhdEigqdGRClC7oCqqHAaIAYNrITG6pOLWguHAm9sa4We0NvdANV1WdjiPTC83TuIWTuaYynHgfcdA+1JewiQCzqxW0bu7vEwj/M0IinwRkTnIPu3PsFfeeIFu4ePbpNHFi5Qdk/S/FhFCSvBTrQmuaUyJS8Jc8JFaXYgdrxKOiFF/B4uE2q/ueVI7rPld8ykZxQQWNOCMVqtyP5KmUV0w008gZRM18weD0Rhy865yaANFUl8m6WjsuY0hgTKbXQ00qBl16S195pf0QeDCCIR+eEeMWP421XpZaC+eZCZJgOCp/C6Ndg1Ccv6GU9Ooe+cbSFuxMSGC5CQ6awjXnnQZr99YDpJtEo17b6ScLmDz5g3+srHkZm6TgQWX5HiRfY3yJDRTCIBYg47TQ3EguI536ZvstWkibUTqdDOh28yXA/rXTQWwwWY0Uhj6GeaEHmKuxAUC8ehqKsxkeh2AeEgGiwWcE2gGAboOcEjmscwUumaSUSSa34wOusF7ELa7zgtAz3Eq8yr71eb3mJxRXZXiO8iEdB7xAOrvFq8ELFtgBOj9h9A2RmQvMxZC8X7WKJUKJJLHRs5YNnVN+bw2mwVVE5gqeXj9DpX4WvvH3n+yNj8nJG/QZ1dZVHfm3u67iSu9H/o4mz+7XtE9lr3J
vbdr81YuDIvunyouMfVuDgrHnJb+Ym75vQPe1JgMAiQpME2R/4gGAwUKMtfbWiT8+rG16i0GSJiTelgngLhgXJdNQ9YHkGH0Vr6nz8lGBEwsWThZs7+Z+p67Q67/TFuukL+xWFBE/OWVgM/7mJL/fPXi37O17q1oPIn/pXqp/IwJ0zu5dvpTzUj/hQf4p91JiJYsfrtbKdZ0SWuhGqaWbNl47lZtcYt9XsR7Q4IgYJjeapCp5GttOHzr2AJNzwdk1DQ01lnYguzsh/trj4jQnZ8rYLMO5G2HUY/+Nb8tD5J7aEbT9G+S2H0FbgacuI5qslp57XMbyF+N/R1mhgQUdaSBWpROetTo9c8c9zLp0csspad8Y/bkPBiUt1Ty/oPSk09Kke82eiZlCAqd27oJx/fl3eKxuG3thi75IKv03J+uxltleGEtreEbOBH8E9T4O73nV7BAEdZeygWHtZEPGuS4LKSMkHZ1u7BNV0LmSXQgEhNzCTBJTJoqM8wQKmAuEQs4Xmn/pexTXQ+8x31xx5SF41b9TqzD6pp/YPm94MwTcmmGDMjTY3YCLEf18ukxY/3yFmb0IPYV/ZZClgXCmAIAoAdF6OAWYwABCWeJDuRnJhdH0qSmjIJwC9ubggrebyI0KSVbDRzapJptHE5dkXXqi0hT0RE+DbMSg7+8IFYXnFwgNHPT0Oi/KwAQsr6udSGg/APUU3xr/RYAxwRc2F4HpyofdwXgSSi0CKp54PAwby4oU8RZsm2CVRiSCw7A2LuzXFOgN+OFmw0ep/CuOb2f/uEZeyvvfSudZVw078UDdrQZ9JltBJPRfMIVyEYFpOnzX3jn/2U0z4B8Fh02ZMycwi3LT5QGYqPJ+c9flLAAJilot6sg+MVD+rvgO/CzihojXInKuh50RKgiIQw3zY9lR82KkJO/Nf/6hu7Nju08Lr6oQ3ew0494OjCG1eVJwcV/8rmZ7x9ToA4BJywXI2Gq2nd/VxkMEmqbVesraew1m2uISWLYqdoftXAKAGG+4J15Lf9SZPmcFJI43RQ5aP2xlEDvmoczRX56C2taxZHx+WMFn77outO4c08+lkSut+k858b8WBSjf3o5Ju4DBxDkMDQLAYADGF4KGn/K5OzFVO6h8d63FDSqznvw/zwCtFtbWF0Ae2wjuJbXEVnsORsn/9UriHpBTszLZR6c3Hx3ybjo8RkrJ1YvkvIM8geyMcjNY8h15r53Kblhej/DZRLsLIRRgz4vk9E0xtHTPjKLMLX/nyPAbzveL3TZi4LaLT85P/daRuxIg+T/mjuoL8HuNakeVY03vAyJHDxl7+0TEdrVk5dUB3bz8PRxZas2zGY3H1V8XOynMtBED0FPvQvcA9F/covAK7n5yjFyIXDlRR5xHNbRa/v/CVI3WF47pPbU1w25WT98k5xxD04txx6Yn1NQwZRT/FEVx8QBhIcsFGTR5TDerHW7bBfD1eIpnfTJ15HWHaSFrPaCZsm0jj+ZEEIx1RQ0uX/3xt6bJlS3/5ddnSurTUJSXpGRnpi0vS01DkrZ07d+6oNd3eQXzEuj1jRo8es8e0c0xhYeEOhuMiPJLiqNWhbIk5TuCkhwdvrPxP7RPK1+Ym7ZO4S8dz11rrPvGP21jw8eXaBfN7TQwJmdhn/jz4zw18qUuGo046/0yvvrgSO178IrMzNj+W+u/NjL54pFDvxL3/o+S7qvI9XLj4kYir0pyg/hDln7/OGnSsrtMzg5ny7zEuNHR890bl3+fJJXcjkJyaRpX/weQkeCch9auXnXsPvUPw9gbdAC82VEWkd42p6g022CjAKkbAKTSA6g71itCIdMpo5y5DO8d3HxFYd8nQdvEAvwiDMEJMSXQYxM67c/J1EoDUThfOkvkjQZnGItW7xm8EFr+pGCpMEIjZPVNYTl6U6qGKF5sdbEbu6ZsFkRf7oGbEWTA1g9NYcIenqJmL9dhCq+1DQ4kTIoQaQ1Fe09EfZ12Ha/SHJYETrYxp0JWRS46euHr4+DUS+hk7dEju4GVnjt069sVtGf0gLsrNHwsjknoEtd1a+syHlevkrJHZjz2WFRi1femGg9+ulvMHPaHICnPDdbRAygRm0E/jU1M6qIUsetcINl/YRG1cN+6BaXWTL5V4PtRMUfjFrLgcVKv5wDePHu3cwTfCJzB4UPvl2154QcrE/1Q4Xs16TCfbfYy7X0aDKqBOwW8ekR8eYmcmy3iGVrU37zloTa6m9Hq4ExGrEzGqaYVQ666xb1bV5uYNmRVa9+WeQXmXfkMrHLPWFqenCM3uHQcQhAAg/EnwcAddeCnGMS/v4iESE0etEalOtqIslINICfNI5IwrKdEZK7zTXDZ+cw8v+gIvvAcnDxmCztw73ijHwwGQqsmFASzmrAiNNqUXTdsBD5j5Is07sMBWhiedOQvSvINEyw6IL27vRWtW8nRFOsLTQbp2OppBJ7ds0FkqxxAWInU0nW40G61ikvzKNfztiasI/nQCf3vtDfn7cpgEBXjvOPrRw8PRUuzs8IDobwCBBQDhJnkOT1DM8RgnXR8VT3LXeTir9kC1PZy65WPp4EuHAWSgnwjVdCSRpmgZ5h3sIQ+TJ8rMTzdSM0IQ6IjEj6EZvw7z8Y3PPsO/wXzy3hedgE87rjku0speFIbMCu0NuKdQT3A2gWGcVNVUOel5VtNwAhWxRkrug0pIkSz8KEjQdON5kfIBwU7W2GGJNN74i798E3rgjOhdZa26hbTw6qDvkh3QBs+C7tD+FLp9L3TaPr0biTgMSx4lxgBIdBYQqihv8nvkPxKbKiWFSetRqOOa0OPo0b3om6odCn2S8Da0Xk4FrUBbQMtjQCxNiWa70doHMnC1gmadmyKjnVH4eJaHZzLBpInSo4LKF0aMGjXihcoOo/oNGjx4UL9ReFviH6+dHj/dPn3i6ddqEldbXp5/evz+mNj9Y0/Pf9lC8XgT18KBD611htTiG/jSS7hWfl/BuwXBe4YG71axNj+Ctx/FmwxaWW3Xmf0Y3uYEBV+GPlspiq/VFKqg36IgZ2he3tCcgg5HX8wfMyb/xaPfUTwn7GsXvX8SxXN1Ys1rpyeShxh/+rU/EhU8ZsAl4gUhFgSARGAzECSaqly2GfjqJxb7JTdtAXRHKva7oocjFffQaU1csC0bvD4ncUj7lAGvvr5i0Na+CYNikweh37d+mdm9fbtxT/ht+SSra4eooh6Kv1KGV8JSsTPzV6IYFVUxpqc6EFC7nBb1y5oKa01zVSn1UvBKoQrC60puxFNokCJAGJio8cU4ueUaM/GkG5iObmz0uO+xEG2ivTBV0zGQjuUtm4isKF0/LLjCuoL4+MqTQ+deQsIH6z/+6PTpjz7ecVBAlxoDLNLiMy2v/xoMIz8Pq4ZtQq583/KbLVJjoAUS7QjEiSTfEwoKwH0R4JpG0O4m8ih2i8SqZC2x2gwVLZGw0AIbe4CvhX7s62otmglX0S1oJYwXSSgcyRsDZrIvf5FiotBX9REesbHSczvdf608+5OIrhcNHDTKHS5DQ4r7b+t89KhXef7cyt/P3jxnl
ycULpn5e6Wy3nkNP0vZ4i1WsdoeECXPB1Uj+QLUmAe1Z6QuUik9TYxMdNpbiWa6jZVEoi+xGZvHxxGTF4mpvQ+NKXyn5+I1Kzpak+LXrVnbw1Yw0t5z/dpN1iRr7Kq19bNrXnu1pubV12ompXbJTF267tleB0YVHsreuG59Ykpq0qb1W/v8e0xBec8169G8QxhDdOgdCBqUPRQIgPg+2ft+YKqyJn7kEfy4TGIzrUFJVYm3UYi2Az3d2OQ9DfWSwWZk7Gfk61bkaqYa6VjeTHPfw5k0sJiUf6SlTvkHLegpmAW98dPQF++Go/HuOrwTFpK/YDwNGoQOaJEjofLpyps3yYBOsbV4hsivIqW/ka4F4KuM7FDZezDWLsmAvpNiK7ylYAnRsnCy/ajF+8zPP/+Ma4UW9T8LH6O/AAK5uLW4mvCqldjWs1hni+qb0t80u4c5c5Kp2tywOVWtjHexYe0dwpSuLK5Nyt4ysQO9G0Z788hYHt1kpTJXru5s1yMjTW6KvHkbzgLTyntzAgUXVw/tn9UV1/zyA/6UGLmvzp27evl7tT8P7p/VBRqv/g71JMe5ekHp0rlVt392fBLVJzwxfv7R+MdDElOegSfyVkZ1Wlnw1vFT52U4d/Lo3r2HJWW8++aw1e06rSp45dPLJ+XC5YW9Bw2K63KonUdAM9PAzkOHJxpMnn4DH+tboOyT58WfhDnOtWnFMjCwmppROrVc1VtHDH5E+YHsUon8CXNqa3HQrVviT2fOnKEZi8GkruEHqQq0JPomHsxQ+DSGLEVMI2tayYWV7juLeJ/HYkjht6hR15ZISmox1u4ZaVFaRu0GT5G8KzeKfIWeqFkgkXaTskI9ZvO6+BTO6vtwpV2H9e4ISvKfjeIgJNp27ztyZN/uchFtGjYsv7Awf9hQhzcc/OdtOBi/cvsv/OpcuAe2gZFwDy7A5/G3eBQaIG/d/eVbs974eu9mOX/gymmzn342Z+QyfAdvhROgG9TBcXg7yVknQxvui4/hKtwH2mkfAqoQfFiNWTR4i1Zf30+dUJ4tkWnqhg4hZKCKCFSz9IemXlYvs4phfaz9sp4UZQXrY/WouCJdn61HJJdyRn9Bf0NfrxfzKjz1LfSImI/6gMZ0iforzMmMaFzfDPcPI6ojrkT8EUG+BSIMEWjaQeVamHaQXodECMWEvk1lVCKbzqigkW4egmVKn1mlrzz3bPJjXZ54Acqvrl6+W98Mr7BOav5Mj5zO6KgpNjA2de7EKbOtaZlxsV7yqNK1y/Fx65Co0s5hEzLaR8coteujwAxhlrAJRIDqvy4BHaiGXRsuAQhK4EzhqBAOJNCccm25IPBZQponO/qxY5mQBWdC8TX2W86+NCTTqlwgqnzrCcygE0gGa/jMNl9j4i1y/q5Jw4MB3ibW8BtbUR1wJYDk3FqYvFlzEVmlFiTdZg1oQS+tseX+mm+F+luVNmFbdDWpvKZNSJ1FbVhCw6dGDf8qpR9+TZV+RDZ2JQ12Zdm5WoaGh7fCgK1vpianJeo8drqLWb32lHXN71NQis7xPAtTXHj6DfyW0H9ZSfKw4KCneia1zTQZTP2iErp3XZ6a+ERnpq9WSM2FfCZPDLSLievSpGuS72iLvpGa76Gyp0SwoVXSMUb/ni60d1flz1l3wugfuJ91RySF6U52ByBD08vBtwwrkQRNF1HJzqJJ27dPKtq56sk4a/fu1rgnxXcm7907efKOHZPjuz+ekNCjB5OJIxquCXWSB8HLG3SluoWL4hHF0WQXpV3ycle0l82LU6Z8eyUkI9pFl+IbvAOO/QaG1x8RsoSVJ/AMuOoEXHT3chWl41NoJ/pKOgECwRjXrgKVMm8B2ssAYLGS1Z1C34XQevFAzV5H1do2A/SQTj6CFWyqy4CkjtBXjv2wY0Yba0JqxttIfn39qp0FsxcjmI92rocg4fG27ZJSOsjj1pfO6DdzwmQZQDAKlaHrJCcdBT7URBoJ7uUy0liItFCCjoHqA10OJE/wViD1UwLJAwXTyyl0KKNDOh1q6AfZdGhQgOkzk2+Uh2qkZFQosyiiyP6LgsUHY6PSo7KjBPKVKMJK3lHBUURmXo6qiSIC8gNyq7ytZlv6to2i3w00KAHtTk0QRY1SaRsB4+H+zNTMtPh0SqPSza93T328Z8XmFYdk9Ha31Ixe3bvNE5+O7xAZ3y5UHjV71uTE4QH+I7pOnT9nqhxtjYtJSlyi2HuzST7/cWc+n+rCdJHab3RooEO2SLP5IqULeVdBE/VE3rxFPxpBB286XCYf2cD9fD6gpQACaxQw05Q+9EK45oh0XMb1bM4NJDYczOIAOeAh4XMuDuDhEizjC328XZtzNEEopkJYjBguHVMweErLusu6mFk9U0dH1JJQyqaXZqemCM3vHR8Un9AiCKdJ5xWapAEgTGU1ia01cdQHGhUQUFxwstVCAW2vsvigBTnXsAMK1+DjyA0Kn52F0t2+7Df3of5wg9BFkVNC7H1yKXYO3FBbi/r/ocxfhDPhSQLpDTowf9pNZdipLAwgcnHCZqLWl3AyS6RiGibCNM+MQa/u1qX17NY/REjw7N937Jxn28W0ay2tUuYajLbDLUQmSqAH3wf8P9j3XHewTeC82LD4cLjlwxKYjrajki1mJudmEXuknbMeNQOQFeREsL3Eg9ojdAghA033uB7p8D89p2HW4T17jhzevffIW0MG9h8yNGfAYHHmpvfe2zR986FDmweOGzdwes748TlMR08EW4VVAjE8wGd+AOjAZ3Aqu28DQLpMdHUkOA+Gom3k9XPoD4heAt+gdwEABo5aBB/lOzKQqhhsOHBr/C75zjkhmn6Hr2pk3ykm39klnWDfOcu+840wi3XNfQsMaCf9juposO8ABEbimcIXYmfWA9YDEEl9v/NL///p/JJZl5eye6xO+zaOdYPRQ03Q6yh9ct9h40f3m45+E+CfH35xfcO0pGDS+oV2r5ubm/1sTsGkXNb6dZi0fnUcPhjuvsZsKqUnSReKIkBr9mRZ0APmAndwwEsSxWjySCqMRYWZCT+CwymMwRWmuwpTBV6BQylMM1niYUarMMfB6/ApCuMtu/yOlwozESyHecCbzEVhaCzIi4hiLe5lKuwxmAEPUFiTRGFNylEwzLdp+AsA3WDJxnLJW7iqz0c1PwiiMxRkHyHAPJdOFrsnkJ2+CSCtMNpQpw3wLrTAl2vINGVgL6LueAodcslAO+gF8o/aB0b2By0k/Dy4fqE39ngHXyJ2wRXHXB/U2vGTL9p69yac00JS2rmO4fHHcAIchxZAoOwbnEr7nghdIgDdN3PhkYZ6cp/197C1bqOsNahqXGuZ0V+F6a7CVIESZR0NsguMlwozEQxvXCPZZY0avqC9HGzOdsqcDUuUOSUJNf7eGwCghTqLCjMTJCn85abCNJwjMHMZXgpMVUOagpebrMK8T2A2MrwUmIkNgQpeDIbWKUmN/ABaKzWzTN7Nf8QpC3ZBAk4WuExYoOKscFkgWjZdoL1PAlXFArUjhGABFZcjQSP9q12LdCSuL4haW4GN1S5q05bR
onZtERvxyPbt91u3WmEHa966BAW0/lU0Q23hQutxR9bChfswmit9D2yfdXTus98b95nOSSul/0CXSGA6Ofe9H5xGYYIkDx4mQYWZCT+BUylMsCtMrgpTRaT0ZArTSnaBma3CHAdfwMXsd1xhQlWYieANWEzXLoTC2EIMtpbOtYOgN/hauCEuB55ExgYQx8K/QoBG2lEismMPdGykUSsjhIkQmiHUQdgbpuCqTTAZpmzCVWzAx+BTsAvssgW/zwb8/haYiT+gcwgEn/2kP+N3EADCCRUH8B0HfPywPR/ADtWGjNqH0sBbcGh7+tJWeYlmN5XWDVbER+ND1LdjiWdqJEDiyJmhEum2EFMhEvppGjr6b0wftKk0bwztSih47cn+m5b0GVjfM8wiwzux07vtexdV+ptk7BOZH9/Y59G69YaLA26XKW0KJAp5acD3i/Dd7BWxUBjWpt1vB1OLomD9wRYtfjvE+IfVsbO1SHLyhlnZs0bJna2XCmNRYWbCT5U96+cK012FqSJ6dCiDkV1gvFSYieBNZc8yGJsfkZSqvGf10GzOFOec65Q5vSSFrwECmwjMQtaXZQLZfBU+Z5raIfBwRhrdPegOp64d5OpAbO6urpuPVWlfoQU7Rh+ntQ9X/FULvfGt2r/q6v5aQf6TbPjXusqqWvwleReOA1eNHb+G8e0z5Fl3ysEgEgzSSBxfrhrFtbVGLzUaB/4avgrxkZh7SZqqXZrrGt1dky8wcQVPccQMbvRf4Nzav069+t1M2PX8sf6vRHRsOy8tLx+/t3BE+vApYrcrd//9xrSzaV3xTysrKkKDjgW0yeneC5rWD/y8Z9+CTcuUtWB1v9IVshZdnbpkMQika9FODmBrocJcVmFmwiQQQGFiXWBkyQkjg6oUM4Vor1MgwH0YiwpzPC2K/coDMNJpFWaifwvKRR0oDD1eK6ZaO19vFadj4DMwjULGyxQy3mBLdsoZAcQ1XJeXin1Ae/AY6AJOc9XNmkO9Hl3qLLBSZ3s6CKYrlh5bUZJelk4rntOJ3shOH5GOpim3iitq0hvIC1GeTRc624PYiy2dO6GGapk2fLdtrOaSRKut1bTztDNfH/rwCB5LcPB1o5p4HmwsIRWvLj2Tlfz15opjt375NG9Q3qRrSK49Oem1pPSXx3x9wzFEEFevGrWw35OPnaqflrWh7ZmiucOFjPHTPRA8OM40NKfHqAM79rzeffi4YZnN5TWHumSkZ+G7P62Rl+xv3/6FmF6Hnux4ZFS3zGz0S9kMqdWEUrbG/XAqrU0ma/e4065JY3YNq6uVvif3n3Dy4hLQgnJIiFPfqTBXVJiZsLPCr2EuMLLMYBgvpvlTiFCdAgFUGOmMCjMxMIhyT2sKY2ttsFkUPmugzbeljB8/cto9Y4HE7B7VXgFlAKAC6ZQTRgYzW4hai4bZT4cJTJ70B4NR7B4LQAxKp9o9+wnMTOmgCjMRO4AMvBmMq92TQvi/j3QTWAhX7wSkxJivPAgOIiaNV5BOqc637/Uil4AOJq8ges8Um2EONsWa0k3ZphGmKaYSU5lpr+kt0wcmT+IaBpkoTEis3dcUwvReiIm+AF/K+zQS1lbD1AavtvRDczBLGepcm9r8CAv6Aqf3TjUjCTpLkYnxEVSi0fwbDceQK2fh/uJRk/CX3/+IL0GfSwO3xon6/hn4dp/vLL0jew7Y1uVsH9x8wfaw9eMWbtwq6SfgG/86ewcfhwHVP0BzepyUvztlS9E82aeVvsqY1X560b3U6n1LO2RUPDvnTbpOrL6QyZ9+ivwZyuSPWSeq66TU/TH+6u/kwT0Kf7WWFSgV5rIKMxMOVORhpAuMLDEYxoNDmTyMeGAu2aLCHB/O8Il8EJ/TKszEeCYP21AYWxuDLZxxhEDwfFVMFA+ynI8nSOXPaFOsVLGaNeOowQRAT5aiXs9U2vvvxgd1w6k1S/7ExHq9cBsvpqly9PiXH1y8d/simY/gNZPUHh7m7Cq+1oQZWa52lcDbVa14u4pdqXaVkTCMakpRHlKNLOtD7Koc6H41fnTME+vGDx+F//6lw7CoJ9aNHT2+rmUrGUb4x7cqWQDrA/1lfNm3fUBJCYqshfFGnw1f9LhWZrqNP/FutuFs9z+29FnUBqIhnl4nd3ad2RY67G5uJ/Yoa8FquthaDHHyxm5FFphkN7ZiKswpFWYmHACYNPB3hfmDwTDeGIIYhI5BaOc6qMJMjGOSgMHY/Gk9gfJbrN6HzZfrnM9fmS9QNjXaUitJLDDtv+tj+U/ViTbdx5Km1InWdVozvOkyUd07jje6dOfrRNXnY3TIVehwl9EhUEeejgZ0zYz/IZXBrBaEr6XWN11LXUpLxBU5WthwXdeDnYMVTmxOEgvlDxhRQ6KPbjD35jxE+wgj9SppROAseUfz8768ojfzRcP+XEUJX0Nssaj9zdSxUE/ckNRiVpqq0/WoX5y7OAvXEx8oEwrd1mYLs+lJHPRUjnsF1sKO8YUd9x6o8PCEPaEH7ADdYS+9eyUurMRWX6LykmS3Tyrxp1WfAra3CU0QsZdCQQdiMc3WnJb1yMYQ/ribBGCk+iCBGEoJZQkoj3tmwB8aF1FNlUqM5k7HatW4UVpgmjZoIBeSVG0aadjiM5mZJxb9iv8mEmHxycyMD6fxLTL3xs0vLSkpWVyyQLjT2C0zetjwUTCuzkSkQuHw4YXaphkUuff4CVJ7ffLkTjhG7Z/ZSfLsKcS3dAOhLMuO+Cz7QW9dsC5WJ+Qpx3GSbIOORGytQkpl2dqPoFuZWO+/alXgHwoflooDUIR0geXNOrL8lKCWDKcL2c7yXe/7kWAiAhovms6OUeKVzhs6eM6cwUPnTU6OjkpKiopOlvwGFBcPGFhUNDC6c1JMTDKEyUpPgfi10E/6GxhBAmAlU9qZ3KtpqMtLe8ugXngprh1kk6s1XQwHod/sYd1fsEYmLJk1LOlAXESSVD1i+dDMmLD8VUMz2jM59xIqEn8WOhJL8KvzIMeaweJIqEhy3rOBsWMzKH5dhL/hcCLDJGDQ1GL6siZQo1UwhXV5blbKRfEALMQ73iPw3YQ7MF8Lz/Yqg4fKCaf59AvSIPwczK0CgM2B78Lh0Is/C5WIi+E7F6Zc9MVXoTv0IPhRXNDz5LcjwEkmc0/CJwEARpceDp3q7xJc0FsM/hSDPwX7MXjed/RQbbsuDWa0HYYCiXCDO8WEfRbO0JbYCAc8NzXla9iNjk/iT2HkT+fIGHsBKP4pbEBdhTvAi3CmXfAQol0j+c/MLhw7Z/bYwjmCJX/O7BG9R86YOYLmJ8FWZBUOApl8L4Bsa39ahRoG46EVpvz9Er4CQ15CEXgaXG6Ey+k8Awh8CxVeovBGaIJhRuEeDMFXXvr7b+EgnmvEc2EZXEfgY0CRME2KBAJ9KhDLjqJLjITmV+lhzUXsEGb2/OmogzCIyGQP0Ayk8/H8+31HdllydzbjeAoaycJYVSmq9XIelUkrnSKhVfCJFNCXpaVV2CrCMyer5NvC7G0221Q0w3EAPonw2/SZehK/4AqZOxqUgvsh/wfKsaIjSTl
WbDQ7EI2zs/T8YQOAnupMYMhR53bvSHqcDhlskbyrZ6omd+jR5y1cjWeLSa1CZ3KQGGTsLw5om+os9J+wC8ftWPbY1DjfpHlpN/F3G8h/MOxmyvQs34RpSUu3wzM4Dp6BJ9HUV318jnkbYIuPUOWiSv1x2NrgfcJgPFDcrHKRwj97UJHwvdDx4Wf9Ct/T/DYqqlLWyx8A0cz6CFuAyY/qJNS2HjWpPfzJhf9/oseQqvkjL7xw9ewTa3PD02Y/XjT2q6/QuLo60muYW/llcMuTphYFBbmk17DRDugNgBAuWAjPGUA3Dc81d00lIHeRsh2KLYfajLzBeVarnnGeN8950Gz1idShA8XFH+DRHvDFD/EY4bysh6Hr16+fjoKwLEET8mW0H9XwJ7outANRYIsmz95cSznFHnsw726PCmymSZE7s+FqplxJkudpE+aPzpTbHw+GeeStNg3/n82ew3OPzp4zmQTQV4QegaCPpmai+QNnHf+vqyMs/4fqiIfURgwGAG4hOEogRiPTmzd1zjOZnmuXVFO4LIGr5mQsak5mJpzXmKNT8jb/Bbts07oAAAB4AWNgZGAAYen931bF89t8ZZDkYACBIx8E9UD0OZEzun+E/l7lLOKoBHI5GZhAogBOMQvyeAFjYGRg4Ej6e5WBgdPoj9B/I44FQBFUcAcAiWcGPQB4AW2RUxidTQwG52Szv22ztm3btm3btm3btm3bvqvd03y1LuaZrPGGngCA+RkSkWEyhHR6jhTag4r+DBX8n6QKFSOdLKaNrOBb15rftSEZQrtIJGPILCkY6jIjNr+KMd/IZ+QxkhjtjAZGRqNsMCYRGSr/UFW/JbX2oq9Go427QIyP/yWbj8I3/h9G+5+o5tMxWscbE6xdmVp+DqMlJzO1Bclt3mgtwOiPxcbmGI2o7KObO5lzmD+huI7lb9+ATv4Hvv74B6KY4+kdvtQ1FJG4dHCF+dH8hatOQjcCJwPszsXs7l1oo/HJa86vKSgqu4lmdQGjpXxPH/k1PEfj0DaoP7ptc7vQKphrtAksG81RySdb+NnazfUr/vEPiGj+1/jGKCizSSLCLPPvPi8Nn/39X/TWlnbvheT1IympZ/gt9Igueo8S+hcTPspAYdeXBu4c5bQmrYO/f9Z3nM7uM1prdkq7stRw5Sknc2miy+mn35BK0jFGvqGmJLS5k2ls66t99AVzPqpkHKWehigT/PuH+Lhj+E6QRZDDSyRneH+Qg/moscqXIcLLDN5FM5DTN7facniTZzlsY4Bepkvw5x/io7UkeJaDZfAm8lt4kfxGb/MKY6wuI8UbGbxNX9JrV7Pl8BZBDoPpFjjY6+MFVPw4OfndJYbLPNq5I7TxnZn8UVtmhEaSzsgYWK4ZN8gox83b6SL1qCFVKeBGENNNJbXmJLu2Z5RO4RfXnZyuEuVcQZsTn8LB3z0FW2/CPAAAAAAAAAAAAAAALABaANQBSgHaAo4CqgLUAv4DLgNUA2gDgAOaA7IEAgQuBIQFAgVKBbAGGgZQBsgHMAdAB1AHgAeuB94IOgjuCTgJpgn8Cj4KhgrCCygLggueC9QMHgxCDKYM9A1GDYwN6A5MDrIO3g8aD1IPuhAGEEQQfhCkELwQ4BECER4RWBHiEkASkBLuE1IToBQUFFoUhhTKFRIVLhWaFeAWMhaQFuwXLBewGAAYRBh+GOIZPBmSGcwaEBooGmwashqyGtobRBuqHA4ccByaHT4dYB30Ho4emh60HrwfZh98H8ggCiBoIQYhQCGQIboh0CIGIjwihiKSIqwixiLgIzgjSiNcI24jgCOWI6wkIiQuJEAkUiRoJHokjCSeJLQlIiU0JUYlWCVqJXwlkiXEJkImVCZmJngmjiagJu4nVCdmJ3gniiecJ7AnxiiOKJoorCi+KNAo5Cj2KQgpGikwKcop3CnuKgAqEiokKjgqcCrqKvwrDisgKzQrRiukK7gr1CxeLPItGC1YLZQtni2oLcAt2i3uLgYuHi4+Llouci6KLp4u3C9eL3Yv2DAcMKQw9jEcMS4AAAABAAAA3ACXABYAXwAFAAEAAAAAAA4AAAIAAeYAAwABeAF9zANyI2AYBuBnt+YBMsqwjkfpsLY9qmL7Bj1Hb1pbP7+X6HOmy7/uAf8EeJn/GxV4mbvEjL/M3R88Pabfsr0Cbl7mUQdu7am4VNFUEbQp5VpOS8melIyWogt1yyoqMopSkn+kkmIiouKOpNQ15FSUBUWFREWe1ISoWcE378e+mU99WU1NVUlhYZ2nHXKh6sKVrJSQirqMsKKcKyllDSkNYRtWzVu0Zd+iGTEhkXtU0y0IeAFswQOWQgEAAMDZv7Zt27ZtZddTZ+4udYFmBEC5qKCaEjWBQK069Ro0atKsRas27Tp06tKtR68+/QYMGjJsxKgx4yZMmjJtxqw58xYsWrJsxao16zZs2rJtx649+w4cOnLsxKkz5y5cunLtxq079x48evLsxas37z58+vLtx68//0LCIqJi4hKSUtIyshWC4GErEAAAAOAs/3NtI+tluy7Ztm3zZZ6z69yMBuVixBqU50icNMkK1ap48kySXdGy3biVKl+CcYeuFalz786DMo1mTWvy2hsZ3po3Y86yBYuWHHtvzYpVzT64kmnTug0fnTqX6LNPvvjmq+9K/PDLT7/98c9f/wU4EShYkBBhQvUoFSFcpChnLvTZ0qLVtgM72rTr0m1Ch06T4g0ZNvDk+ZMXLo08efk4RnZGDkZOhlQWv1AfH/bSvEwDA0cXEG1kYG7C4lpalM+Rll9apFdcWsBZklGUmgpisZeU54Pp/DwwHwBPQXTqAHgBLc4lXMVQFIDxe5+/Ke4uCXd3KLhLWsWdhvWynugFl7ieRu+dnsb5flD+V44+W03Pqkm96nSsSX3pwfbG8hyVafqKLY53NhRyi8/1/P8l1md6//6SRzsznWXcUiuTXQ3F3NJTfU3V3NRrJp2WrjUzN3sl06/thr54PYV7+IYaQ1++jlly8+AO2iz5W4IT8OEJIqi29NXrGHhwB65DLfxAtSN5HvgQQgRjjiSfQJDDoBz5e4AA3BwJtOVAHgtBBGGeRNsK5DYGd8IvM61XFAA=) format('woff'), +} + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: 200; + src: + local('Roboto Light'), + 
url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEScABMAAAAAdFQAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABGRlRNAAABqAAAABwAAAAcXzC5yUdERUYAAAHEAAAAHgAAACAAzgAER1BPUwAAAeQAAAVxAAANIkezYOlHU1VCAAAHWAAAACwAAAAwuP+4/k9TLzIAAAeEAAAAVgAAAGC3ouDrY21hcAAAB9wAAAG+AAACioYHy/VjdnQgAAAJnAAAADQAAAA0CnAOGGZwZ20AAAnQAAABsQAAAmVTtC+nZ2FzcAAAC4QAAAAIAAAACAAAABBnbHlmAAALjAAAMaIAAFTUMXgLR2hlYWQAAD0wAAAAMQAAADYBsFYkaGhlYQAAPWQAAAAfAAAAJA7cBhlobXR4AAA9hAAAAeEAAAKEbjk+b2xvY2EAAD9oAAABNgAAAUQwY0cibWF4cAAAQKAAAAAgAAAAIAG+AZluYW1lAABAwAAAAZAAAANoT6qDDHBvc3QAAEJQAAABjAAAAktoPRGfcHJlcAAAQ9wAAAC2AAABI0qzIoZ3ZWJmAABElAAAAAYAAAAGVU1R3QAAAAEAAAAAzD2izwAAAADE8BEuAAAAAM4DBct42mNgZGBg4ANiCQYQYGJgBMIFQMwC5jEAAAsqANMAAHjapZZ5bNRFFMff79dtd7u03UNsORWwKYhWGwFLsRBiGuSKkdIDsBg0kRCVGq6GcpSEFINKghzlMDFBVBITNRpDJEGCBlBBRSEQIQYJyLHd/pA78a99fn6zy3ZbykJxXr7zm3nz5s2b7xy/EUtE/FIiY8SuGDe5SvLeeHlhvfQRD3pRFbc9tWy9/ur8evG5JQOP2Hxt8ds7xLJrjO1AmYxUyiyZLQtlpayRmOWx/FbQGmSVWM9aVdZs6z1rk/WZFbU9dtgutIeCsVivND1dsWSG9JAMKZOeMkrCUi756MI6AN0g3Se1ellm6GlqOXpBxuoNmYXGlgn6D/qo9JOA5ksIFOoBKY79K6V4qtC/ZJy2yXNgPJgIKkEVqMbPNHpO14jUgXr6LcK+gbbFoBEsoX0pWE55Bd8W/G8BW9WNboZ+b/KPyWslDy5K9biU6TkZpY6U6ymiLdUv0Vyi9jvt1boT+x9lTmyXzNUhaHKIcqyEaDkLfw8YTQBNDpo2NHmsVjZtrl2u/kZLmDlHaT0BJ1HTZ45+gbdfTSznJVOK4WQkWAAWgiYQQB/EVzAxYhheIvASgZcIvETgJGK8NfDdgN1GsAlsBllYO1g7WDtYO1g7WDrMcAK+a2UA6xci+kp0i0EjWA4s2nMZO6DNrE4zDDbDYDMMNptIHSJ1iNQhUodI3R4DafGzG8JSKEUyRB6VJ+RJGSbDZQSrWsb+KJfR7OAJ8rxUM/Z0xq6Tl6Re3iTyjUS9WezsQ+7e9L7j24G//uznFl2th/WAOrqPNelG0hq5z6Srk6Ub4Kau0Mv6qe7W7ZQPsxIhPcgeX3sPns6DCDjYSX/9rj3/7ka8bbeNGQXHE/UzyZb3Naqtt/W+FAepZ1J3mVOWPoW7ipYzFE8hSiE3Erfcabyo/I+kF7TVzPBMiq6VU3Wr/FGy9F2y1MD5aLfeG7ukh3SKztOQHtOldxmvgTW/3uWKBeLrqifdSuxbPeNypiOTPb/StfqBbgBrYCOIKkifoH6ou3S//oxFky4jLzLWvTSoV/RrU96pR/UY36Mdx9VzerNDbA+b/M8UzXE97TKTYCcvdY079Fxl8v2duY3vJb3Y3lvbjK+QWdMjScujKb226ze6V0+AH9gHId3G3ghxPk5yZs+m2BVzo4j+otuYZ3wX5ibGa4uP3R5tYufcaU32pGm7er+ninU2ffVaVz47Mt+tHXstTVvae0Cv3PeYTjqG4n5v927ukWDyTnDucuZXdXEerpqzcsc10D9M3nKnmNPFnZ6n7nOlY/RxrdBhYDA7yovKyx/Mq5N0vr6l67EIaA4ne4k5369QP6Kvpd4r8RRjZ+hP4PPkPrp4i832qOJ/AP1E1+ke7uE9nPDWJJ+Jrx4Cu92zEZtr6m93h6H2O7CDtjENA6eSpZOdzwL/84C8m3g93kuyeVN44C/L1LyIT7J5D3gNqz0SVjloc7lZuAc7/RfC3NHu/+dBU8tP6vORAnN/90poeoM+5H3vIaYsM3omo/oYwfVdgLgpk6+vWxvGSuQWfkuMV4v5+Q1TAaIMIr2ZVYhyIWLzCipijKGIT4qRPvIU4uNFNJz8aaQvL6NSeBqJ+HkjlcHUKCRHnkEKeDGVw9dopJdUIBkyTsbD80TEIy/IFKKoRLJkKpIpVYhHahCvTEPyeGVNJ7oXkX68tuooz0SCvLrqiXCezCeSBbz//bIIyZAGxCOLpRGfS2QpHpYhPlmOZEkT4pcVSJ6sk/XM1325WdKC5JsXnCVbZCtlG75djiSFI9uwkwE37hv6Md6G2cx+NJYVzKs3MxtPlJOQ/sxtqjzEO7FaBpk5PMIMZtKznvgGm/hKiKsJPjcw3oj/AIgWgIQAAAB42mNgZGBg4GLQYdBjYHJx8wlh4MtJLMljkGBgAYoz/P8PJBAsIAAAnsoHa3jaY2BmvsGow8DKwMI6i9WYgYFRHkIzX2RIY2JgYABhCHjAwPQ/gEEhGshUAPHd8/PTgRTvAwa2tH9pDAwcSUzBCgyM8/0ZGRhYrFg3gNUxAQCExA4aAAB42mNgYGBmgGAZBkYgycDYAuQxgvksjBlAOozBgYGVQQzI4mWoY1jAsJhhKcNKhtUM6xi2MOxg2M1wkOEkw1mGywzXGG4x3GF4yPCS4S3DZ4ZvDL8Y/jAGMhYyHWO6xXRHgUtBREFKQU5BTUFfwUohXmGNotIDhv//QTYCzVUAmrsIaO4KoLlriTA3gLEAai6DgoCChIIM2FxLJHMZ/3/9//j/of8H/x/4v+//3v97/m//v+X/pv9r/y/7v/j/vP9z/s/8P+P/lP+9/7v+t/5v/t/wv/6/zn++v7v+Lv+77EHzg7oH1Q+qHhQ/yH6Q9MDu/qf7tQoLIOFDC8DIxgA3nJEJSDChKwBGEQsrGzsHJxc3Dy8fv4CgkLCIqJi4hKSUtIysnLyCopKyiqqauoamlraOrp6+gaGRsYmpmbmFpZW1ja2dvYOjk7OLq5u7h6eXt4+vn39AYFBwSGhYeERkVHRMbFx8QiLIlnyGopJSiIVlQFwOYlQwMFQyVDEwVDMwJKeABLLS52enQZ2ViumVjNyZSWDGxEnTpk+eAmbOmz0HRE2dASTyGBgKgFQhEBcDcUMTkGjMARIAqVuf0QAAAAAEOgWvAGYAqABiAGUAZwBoAGkAagBrAHUApABcAHgAZQBsAHIAeAB8AHAAegBaAEQFEXjaXVG7TltBEN0NDwOBxNggOdoUs5mQxnuhBQnE1Y1iZDuF5QhpN3KRi3EBH0CBRA3arxmgoaRImwYhF0h8Qj4hEjNriKI0Ozuzc86ZM0vKkap36WvPU+ckkMLdBs02/U5ItbMA96Tr642MtIMHWmxm9
Mp1+/4LBpvRlDtqAOU9bykPGU07gVq0p/7R/AqG+/wf8zsYtDTT9NQ6CekhBOabcUuD7xnNussP+oLV4WIwMKSYpuIuP6ZS/rc052rLsLWR0byDMxH5yTRAU2ttBJr+1CHV83EUS5DLprE2mJiy/iQTwYXJdFVTtcz42sFdsrPoYIMqzYEH2MNWeQweDg8mFNK3JMosDRH2YqvECBGTHAo55dzJ/qRA+UgSxrxJSjvjhrUGxpHXwKA2T7P/PJtNbW8dwvhZHMF3vxlLOvjIhtoYEWI7YimACURCRlX5hhrPvSwG5FL7z0CUgOXxj3+dCLTu2EQ8l7V1DjFWCHp+29zyy4q7VrnOi0J3b6pqqNIpzftezr7HA54eC8NBY8Gbz/v+SoH6PCyuNGgOBEN6N3r/orXqiKu8Fz6yJ9O/sVoAAAAAAQAB//8AD3jarXwHfBRl+v/7TtuWLbMlm54smwIJJLBLCKGJCOqJgIp6NBEiiUgNiCb0IgiIFU9FkKCABKXNbAIqcoAUC3Y9I6ioh5yaE8RT9CeQHf7P885sCgS4/+/zE7OZzO7O+z79+5QZwpG+hHBjxNsIT0wkX6WkoEfEJCScDKmS+FWPCM/BIVF5PC3i6YhJSmzoEaF4PiwH5KyAHOjLZWiZdIU2Vrzt7Ka+wvsELkmqCKHtRYVdt4BE4FyeSoX6iMiRPKqYCxShTiEh1eSsV7iQaqF5RBWp7FaE4o6dwoVhHy+H5apHH6iorqZf85805OM15wrd6edSAhGJjfSCa1KSp0jhWk4gFiFPMYeoEleg0DpVcNXXii6SBCcFl2qieaoVztjYGdUOS3XslExxjbAHX+fyZYFqoTQgdCfnvz6snaPcl/AK611DiLAGaEgm6fRmEkkCGiK++MRwOBwxARkRsy0OjmsJTTLZ82o4OSU10x9WiaO+xutPSM70h2pFgb3Fu9LS8S1RrK+RLFY7vEWVjAIlqU5NdNUrifomza76iMlszavpbRIsQI9LjYezPjjri8ezPg+c9blUG5yNc9WrAZqndEna2etfp3OJL8+6s9e3p514oCS5argkkwfWZa8SvsIiNZZEMxzEu2qs8TYPXqrG7ouDD7jYq8xevfiKn/Gzz8C3Eti34JrJseukxK6Tip+pSYt9Mh3P871dHI9EumTkQkpqWnr+Bf8pvZNABJ7CgCcAP2Eef8K+IB/wBfigB3+K4K1rqGuwVk/bDRoziHaDl3/9z2ByXjs1YMwA7S14uY92G6y9SVfeQV8bRZ/X2M8o7bo7tDK6En/gPKggqTzfkY9Kj5AO5CkSyQMJKm1BDub6SJ6IPM3LteRFZBCm4g2rKZb6iJyCp2W3BbQ0v0Bx1KnpoKIko05WOXe9ku5SZWB7bkj1guDahhSvSzXDicSQmuWsV/3uerUAxCOngyrHFSteucYmprTJ9BcrZrcSLCZqiii7txPq8CdkwVngQlHYGx8OdSnsnJ2TTws7dykClUyjThrsnB1sI/m88f406vNKJl+wMJ9W8uWHHvvblsd3fPT225vLtu3l+PLnH//bs0ve+PCtj5TS7afoc5L63KqKSQ9f3WfnS2vfcxw65Pr+gLhi96r7py7r3e+V6g1vOXb/3fYxWNCk8z+JC8WDxI7aDdzpTh7S+aN2ctRHBOCImuCor+2amSfY89SucCjb2KHsqKdKjwKF1KkOYIHDpXp13UWFzYDDfDjMd6md4bAtaGlP+O11yO4am5ACRlCsds6HP1Iz89LgD6J27SS71ZT04mI1QYaj1LRiZArwIRyKT6VeKdgmu4gxqCfVGeKhfpp1mfcnrZ43d/Vzc+ZXjbprxNDRJcOG3VXLvXVDtJjOgTeqVsMbo0v0N0qE/gPmbt06d8CcLVvmDJk1a8iAIXPmDGmQhakdzz26euCcrVvnDIy9NXD4jJnDCHiz4ed/El4DvrUhHUlPUkEiKegVMpBx2VJ9xIqM684Di3oxFgVBeYK6eXeCw04utSsc2kGT7C7VB4fxcr16FfxGPmy3ChnZHWRkks8OTHInprZjTOqeLbt3EJM9MbVDZ11rOne5ijJ1ATaAdjgp7QUeDdTEbwrmOGgjV4rgUzkmB/WAHhXBRxiPhj+x1HnzwMiqx18adtsa+lynLpP+0u81bumM2w7d9/Hpyk1rR2y7VisRTVzBtEEPXXW12q3TPSPLJtN7K98YYxvz4l+rNq+dOWzB1TO09OuUMfM+/+th8ZGBt9ZFZlVffw09JpqEzJEruEN9Hr1pYYeSroPGLgAbnCb0IceY387WvbbhsqkiXeCvkVGN3nmauSxb6EOt7+3XThK05Ye1TtxEaSiRiYdQxc0YbAWr87AveQpdpCidSpzsc7mBDdnkYRq/SUp64vDhJ5KkLdoJrqeTjud6l9C/3B39Vdvu1bZHfx1/7RiuM17brXWivza/Nl+n2puu3cUtF7q4nKJwPIHLE1PQ/fiRow8nSS/TeO3EZkmrKOPc9EYv/QvnK7u2JLpXe8qpPRx9bwzbdyo3m78B4oiD3EMgpIKzoQVUcbL9cyB7EczExZy5kp1EIQjnv0NUQvPfQfd+ovP+TPTqDoW4FMdeQaEuhdvLqZwjP58qDnSmVBU58Dc20BQeY6jE/IrIh/ksv+gx2WiOJzWD3iiMNdO+Aa3mm9vq3rvtiHBr6Uw6VVs2t/Re7YuraCft4560PWH77U+WC52EHRBlbyEKKVBMYZXa6hUxBMJD70is4DQpwUPKo6OEsGutY3EcdFwIRSxWfM9igo9ZLXhoJZZY5AW3D6EdXL0clPvTyHT6utZvOjetnH6i5ZdrafSYvofBmkadZBfoTBbuATXG2kxjQDJoUwKSKxY3qszgfhXj4Iv+6pe1E/p1OnHdOBe3Biy3DV5HpVI9/lBFKAAW59XyXtREwB7G3nyd6Ddct9JS/G41vHQk6+G77WIIxl7feICXQAny3nr2o18CsUv10vXr8ftp5x/g/s0wkEwAMiHwgVX1z/lpmKZxoyZEX5gtdTjzKcNMi8G3BA2f3I1EbLiQLMW8MTqVFN3vOpv8LjAi1fCwqk0oRlZ4ZJc7HHInUhcXbMN59PAi695x8ekjR/44feTw/1SqGzZsU6qrt3KFtB9NpCHtA+0H7XXte+0j2omavv799Dd0/Lf/+c+3QMeu82e4DWItyKI7iQjo7zjcEeVcGXsLEO8wsQjACidslkeBC9SiGzNoMxMRMjcLRL6L/rtSNN865Gw/sRvyaDJgLBloToKjiAMptgHFaCRqPF8fiWdXi09CLUvWAZPMABPYpSrBcpIHPyDZQdU8Eh56HLByCrzrSZTdEd5mLQamqDbgj+IsVuLliEQ8xSzIZBvO00T9oI6FNOYefcHJ4h+f7Dr2zGJtMsf93FBJjy6c+OzDGzZPFjw7Gg7vqPyfFVo3sXQEl/rUOyOWrH91JdIx9vxP/GmgIxe0JtIW6RCBDrEtbkkEZkRSkCQvkORlCMObYMmrtce1TYGQakfR5unuACID51L8iDcS4DihADEFnEKUgRBDyXIp6fiuDMdyAaKTiJzOMEscEN4e
wYcfYgegjrYsdsQB4FBJVnGxYpeVNgBJ3GpienFL5JEHxsMOGPU5jYxhyCPYJnMsV/7Gs6u27nhp2bI161eueLimnBP/3L3/h3nTliw+d3CP9jNdJC1TXnj62SfL1sxesvbFxdLLx+p23729fc5rc/Z9fQR1ux/IuT/YgpU4yRASscS0qJbYLJwdgDoAZ6lekQAYuwoUS50SF0LlVvhQxMxciFkCJloYPLagN5FRuWyoXLRY4WTFwVSMhmVAkqBnkJjkmPpxax44frwi+h2XKoVpeV++oSGrVHuclpfyvbiJzD9sBZszw77SyX4SSW2UW2qj3FwoN4+tvsaR6jLn1fptqS4Qmd9WzxC8s64myUkceSoHcRxFlOSMAXPmyx1O9OVOh+7Lr9p8ZjH6clFxuhTXXjBixbN351UP/tkVztpqvA6PJy8CrxkPZTwUlEBli4nizacRl8erw2aqmtHTpxYrSaABbtRsB8g3QsxJxRfIFERpyvEgpO5Fi7q4fV5wBtlbufHVy9a+8MITDz8ZGH0ztz+6rkvRwik7jx/9uvYXOl168rkDO9cdHDrMxadOjp4JdeH58+TwUe3PdwjzTyuAV+nMVnPIXSSSgNxKi/knG19f685MQIjoFoE5bZk+J6OrCinJLmSK6gPmtIPfgWTQUMHkTmAampkGGupzAgS0uYE4c7EiyIoJqZE7E9BEvykfAI2UCgYKbo0RQoqak7mCpn3cf3lxenH5wLWf9dg55cDx3w+8o52r3Pv08m0vV03fHuBS6OQG2qtNRklGWsP78weO1H498rn2I23f8PGv/3pxW92cu5guDAAdRV2II51JxIwaik5bJWie9gLFXIfpaixFg8CnOlAHiRk2zRfr0cNKeVOwyE08A/jXT5zNtVXacqn5C/GGsjLtx+gebemMGXQq91dqIoglxwA/7cBPPwlCjnw/ifiQo8nAUQuu2wE4mhPwWYCjObiFjoyjCcBRCR1AJhwkuNQ04KcbDnPxXBwwuBOcyM0ENGnhfckBJ2MxMlx1E3ACObLq5OF3B7caJxXrULKoGZJkNi+AzTfnsKfZ8ZiqRfcuPvn3Xf956N5FL2hnP/hEi1bse27FgbefXnGg3ZYli7aqCxdvpgvm72nXVrl/10cfv36/2rbdnnkHPv3kwGNr1z360JYtXMH8Vavmz6l+HnVqKPjNfxk6BejIGot5LAJkAQcS0qw8cCBBatIpbz0qFIQ/JRBSTV5dp5LRFdhZymV18LpmyVb9XAK6BzUL9Yz4dKIJi5BeAkaRU5RGWQKBuJkzcLNO7FByftenmnb6i4Grr4vvu2jwhgOFNZPe+m3W5uULtmVtX/XIK/zuozRXO6md1QZHtfq09DEZKV9/uHzEGOr9cuOxRSUrP/zytG47GCSCQldWD+nQhCYYIEAsYUbSADshlAAvyBCFpRFR8PCzculSwBX83xBbcARhTo7QDWKyhXQiEROgalXCC1ljAEkxh7D8IeH1CljR4AK0ZMOXcYCY0pbGMJOwAq+u28IMfgn/EVydgFf1UZPPT30D+O7RlRMmcGX099F0xhztlxQpRTs9B/fzFN3Af85vYvQl6UjLqlNnZdQZxKCNUPh5iu/TsJvvQzeMG0dXjRunrzkL1nxHX7OokBYV5lBYeRZXOWFCdAk/YMYs6k4GL+CcqT04mvH0ZjCi65nupJFJJJKMPE2xx9CDrSV6SNfRg5uhB4CiSnIIzaU2zUu6C3lKXCOkYElsXBLoCh8PhuKRVYsLHW18CjpaKe4C8OCgviB42Bh4MAWRqzfzdRtq3l00o1dyBc29Y8JdS+bcD1GHtlkmlLy4+9DmxR9PLRwx6oG7byt/Ztq8h5fed279ypVAzwytu/S5+DAJk2vIFhJxYrXCElaLxHolLaR0KlBzHfXK1QWqD35lFqg8Aq++zCRyIOfO0X2sBMlEP70ydNW+s1P11KGnS+m1FzzLGSVpL6lJSu7ZC+swtPGIhZYcsCCVtgWaA3Jvi4WXM3PzOxV2w+KF5FZNbZAJzlz4TId88NVXFwE7EhINdrhJIIPwEsYYI/3s4mauO8xLzJ70D3AkAMd++EQGofobPWiRh/n3GW76Ga2gi+lS2Vr3wcB75MLnyh5Y4vGf2Dhyaj+OD1lvKnr0RZtbU7Sntb9rI2QPnUhvHlLbK733B3dqC7VRXLHr1lG3P9KZFmQM7PigQr+mGzlJS9WGHNb2lQ0fNfqXgxoNFxZx0X0LR515iy6i27R22jxtkdahfbB/u470Nzp11au3T4UMlsvwJ/0M8oCsXvgG4oEJMqH2us0qfJgFhVrJTCi4JQlxQFwBy21UipHAigVMAPdBPsB7AkAo124KlzXr6Wjp07u5G7WvJVE5exN9WhvHUcg9WBzYA+ssZvmhH9Ycb3gHJ3hBFn8y0Av62XLMCwaYyJ3o/kMAJJje2pz1NaLNYwYDgPMpYHagyG0o/slCKlH9TpYioi+ECJuhY3JIxJojvayA7uUDhbGDPfSl76JzJy7aEP2HNo/Oe+HV6jXaRDqoasurivaBqOzZW74hI+HQwv2flK557IGNpcsWP7RMt+WFENs2g22mkrGGZXqAHk8yg+jxgKsYaIgDPBwn4Lk4CxppGiPNBSS4WPVTsYQYDDaF1HQslrhA+4TkYqRClRJRIeM8cMqUoFeNXODVBUj9UZ+4VOp1o4KF/RLEM7KQ5v72I3V5uPKEd17d88MPe1495C/nPNrP3/+m1XGjT9J4OvqPb6Tte7XDP5z6t3Zk1+vSl+fonehnUD7vg3wsxEM6GtKxxqTjwdDsjdUiFKsLUQHzIz7dfcug+FgzCAB3SU/amSBXq6mNjtDWa79DutXxMPVrP36ufSQq2nNa/evaj1pVKc3/Yfdxms94iesPhfVt5DpjdUtsdQF0Q9RVUeSZKuJGYmk4S9EtgFQUa0jPx40kXE/A9Z89/FMNx7i/R6/hg6JSFj1aFl1fShrXHcXo7q2ve/GaJj3itLamsaDtggX38C801HEHoj1wsbfujt6ur7Uc9OUD0JcMrKmlxfSlFSWpTUhMQ5DJ8uFAK/qCkNMUisQzVYuHNIvZga46aaA6yTKzhwRQHCW5WI2DNNFAmy3Uxyfr6iODMchMg5bTwj9+ohYfNzlp364Dp7T3n3g3S5tNz3XSogc17XVuCMjUQW/9aZe0fLt2/Gvtt+PaVzd3pLPKomevm0mHNfG0nsnyKsOjmHSPoojhWivPuGptkqSN9UcUm15lFljDpFGG2IAJQ64DTK3ge1RUNBwQleit3OazN3FV0RJ9PUi+6M2sBhFoJsPG2gVcDX/ExiseqUT/pH/3FsBmKnzXg3rnaMyNHI25kYVdCpTfHctcWQ5k05Vfz1UcwGsL5CiKu3l+AithZpmTXdj5Fq5843OLNlee3PV+xVS6TKpat32F4Dl38q2fxpXtNcd49jPzjzGeWZp4xtsZz3j0jM7G8ggXwooaUXm7nlFQPaNACsE5+y0U4nQQ2PYW13MxF93ALeIejT7/NrCvhKsSo8XRgMhtiQ421jbB2mIsAuBKBg+
lGA8jPNN6XrTEKphMOL49lRwY9dntTfYkdYRryeQ241qmuHAjJbGKJkvsdUaa9AKkKhPGSMUs13BinB0jskmv92F1JcLbHCwKM9ooaoQnhwapySPvWc35JS6xqsIqRb8bHD0u2WA7msiBhjzAzebOakIDjS6Jzm7SzVNMN6+9SDebKyRoo2Dszo7ixt1xLGszG1tSeUtsQ0WootQk76nku0ugowchAJ5Lo8I/z94kHKfnUsG/zgLb//7Cupc5VveyXLHuJdj0uhf4/5ivzSAeNF83+Fssgvlm0Y6UUIF20d7VGs4T7cPK+o8+O3nqHx/9iK4/kY7U1mo/nNS+19bTETTpZ+1bmn7q1AmaoX17QsfvyJu/sfqFh/Rp7g3B/9dabEwHLS1DgS2E0cCJBV4jGqgem9wy8AYDibQp1v7+r3Pn/qUtoHNqt9du1xaISv3efT9G13H7X1n28Gv6Pmadby86gFcesOebSURGXvljvEpDXrVhG/DCBrwuNcngVRBLE17Muh2yjbWjZEiMABXIumalyaBOzVjo5Ux+UxbDaZdg5MTSs4O1P7s/cP0lubleOzP4RP8zqakXs5Qju4CfH4nbALsHSamhbS5d29QgsDQxmbE0EVmayShKAoqSQ0qSnvmlM/SuiCE1C9UgSTfzOFmRgapEomMd5uqV4EVYB6BBvN8Hfp41jZqJYBc9+e+zD85YXJGRNSMrbcsqbSy9++CO7a9oD4nb3j847ZXcNtsWLu07oU1C5oJrFz24KjqJ+3PN4sdXge1gLl8JculAyluv/2GTUU2BUJYi47mUhJYdxvbNOoytNBTN7bGmZ5ODLK/FJmKNw5fVvtUWYmY45AdCfaaWLUQhKKG7HcNN0jZv+Sxy9NQf1HP4nw89yE/6UN12cMc3P/2ufXf0i7VVdIX08voVsyue6dZj77rqT2ZP3yqK0vJdz02b9GTXHu9Vb/2AThp3SEJ/0QFk+BjDx2C1UvN6icKHWEor1aHuR0RWmRUBFEQk1naVsILXlBFiL6CDUKLZKrFScnaHeAPzR9Ws14b+skjPhlTJ8L2KtdFd8lgkdOHFWPUD3SWkLljsZaVwiDONAQfLGtWVX6m1xyq0o//+QTtGP+O/bMja+e6h1/H3zw1R3Q8i7v+Q4Z6AUakkHBs1QKzDAI1KLLGiT5j6w0WI9zMW0B2pkJ9uXxD95xTwcdeOHi3shFBKSTH4fewD+EitXuNRnGF2yQjFAACXjWekUEjVqUuNww4hyl7P4t7485erWVufuBTfXofe/9m5r+rkcaOUmO9Q5L2q2XdGVEzwxuyfb8FqIsSQGpfs9ORF4LVZQbGGM7tklv3t4Exmp0v2NXXlKaxthGziQ8fKvDiQmE6RRP9VFAmlOUETDRbPpJb2UhHtPIV2LpQKqGmG9tAU7bVsKUvbMRXIP/EN/VbwnjvxT/wFvv6OZ589t07nb3fgr8LiTLZh+eYwKwYbcUbPpjiMI4KVxREL1f8PWmh3elpLfoI+S1c9oaXQ049pt2m3c8e4D6LLuUnRUDSNWxCdA2sEYI2dsIYZEbupUYY8LGApUEx1DKFbEambWPQCivUDpBfWooirltG9dP+y6MkKUWn4nG/XMCZ6gkvWaYDEQBjPdCQ/FstjeJXn65sUxaRXqAE0G425cCENYBEk4LuTH9bwBv9xwzp+9gjh57K/noszcMI67W16UpoHdlXIKimA7LGSQvlYnajW5CV2IQ9RDphX7C8+FDMpgB5BOexbR2/45BPtbdOrZWe8ZXDdjucf4MVYP4q07EeBkIMd7+NG3ScqZz6FzxLYQ3+2h15EMRXoRl2A2J/twVQHy9VK+sKSS6VghRTs3RXbjClW8fFB+AcEHfj0U9pf2/6JdKLsz+uxvsQd4RoY/xp7YwbLYC8sfQYt4wfQvGE0d9qBNCntDfjC59F29Pi4cVqKzid6fhU/lWXQSc2wGR40IywM7oXyUxoeK2XfuUPYSfeLB4hA2hC9AcELxIWdRZFxFnLyOAG0Qt9IUdgTvINbeeg+cY+o/YHx927AxG8LAyFq5ZMTemarJIUjAVw9xwoZLhbizBDA+PYBD+JSLNIUMPPGgm2mS7Ghp2cTAECvG09hDTcipOaGQiFI0zGtVzsatn/tb/2Z7SfnC0rqXlFNij8jKAl7d+799XcLs/IEV01iQpInT0l11aSkJoO5w59N5h6Bc8zqExJTUmM1n8SURnvPtLNBFTUNgEnEE8hhzTI+AJbnx1zJLEdszni9xNM5s3usQVYAJt+5iFXAwL36IZAWNp85KITP3E35r0499eDsFydxk6Ztr/nC7pwdZ+3x9uyqbRXTx89/s/1/1u2nGU/XPjht4ZzhVJKkqcNG7Xg5eqJ4QmHRTe1uK9+4dMjk6SOPLWOYZzXEAUlKAE1JJ6MN7GVHhvsA+EjI8BQ8YH01iWJczWAMd+uJgOyqV9wuNQHnwPTujOpG2OPSywh2JDkF3Z2LN0CrzDoNst4zyTF5jPowIiDJtLqyy8Zp+7/66o2KzYV2ue2a+1dXPb969rNZUkK0cvhd2jta1Peb9s2dQ9fRjJGTfzzg+5Dys0Yz3RsNuvMO051RRNeYeNDX+ECsSBkRkBYnYAQnS3edNqRFRz8eoMXjUhNBL+JCaqqM5V0GfRKxACIEWHEuHg7NqcYEjbslDEDMg4Ew7Pf6vCbIvbjRv34Zuf9ebvy2uVurNygVO8ZxlbPXH/0PZ849QTveU7ZOEqUFq878PXfvn0umS5L4aEkpLWDymAx0fGrI404dr+vhGeUhxOQhMHkI5pbyMARhsoGux6SR4EYSnKBvVhmU0ZBGnMko6rBCImYROc0L9LKepU/+8sCUDUUV46xdXr5335eVq6umrcpr9/T0qjX0vI/ytGjUEG7BmR9X3z6CBn478OPYEbRh5H1a9ENGxwig4yOQRzzQMYxEvEiCXTJISMWqm8UrxKpuGc1LPIlG+oO7T7QirLZ7/Swtk1WXjLKw2FGhZEMWhE0rBXz61rH+2YZ4/AHdnEZQ2+63jkeFfVXlVV3DPV+f/67223yOm7Hh0UW1NFr0Iw01fFKW+sofvbrd0rs/bU8nimmP7H4X9KkPEFEjdSB+ciuJxDOrwPgjWQAk4WykHFaJCGoDWCyhQIlnExo+rJWEmk0URuJ9TP8QkSVixJLQJVjYvsN6W6ixAacjtT41654M9A06E8JtSsZSTtMq+cMlVesiVstdkmlWeVVJQ1v+MNMTrT9fB/xNJXlkmlEFDIBmmGFzOpPbmpkb9GIVtT1jcBrsL83FsE9mKMZuNl1WoHYAbqcR3XL9co0g25ONyToTcDwZ0htA/2pbe/OKIFOeIr3a0HqnJ6ZIRw/eu7HIUfrDBwOVPum9H7256oWijeX7j1Y+DyqVm/PM9Kq1hkqVjthy7h8f/5odKM0I7Fi75JahtM2v++vH3UH/GFmpNXygx6YqCEtfgI14yAAD41jDuq9yoq9yNvkqb6N9cyE0cZvhp7CCYvMw1ACmTQy8GfNO4HmD+kyHSa6q7FJbuemVymUzZr6YA27ont
ET/vFNtJRbrTw7f3xUYrq+BTaVCfthc76x/BWVBAOl0KIB5dQbUM7GBhQsiQ2oLRUVFUK3c2+K5Rs34jXPP6L1p3lwTSdQ2ZUwsaI0BQvAFZdCMc5hT99VoMp2PTMG2ODSpeoOGfVRXpdJrCKUje2Te+2urr6hYyqefzStkAoV2shS0TqzUnjy3MTq7VZTeqxHtQZ4jHNljlhdFOtCIs6X8XYiYvA11Ud4OyvNMFZfuj4ktlofWlM5hy5/mNMG0a/5pVr/h6SEhpH0gKglRF8VOWf0P7CHJr6mkEbo0XppbUuFlHDmR/jOCsgH5oJdZGGuyHCLKwXrQGgWqCJKXBjtRPGB4Wazi2Xp2pHlYkUPVuJng6hY+lRzcDJE1w8lVQZ1UVLQgBVZVuN86IsCLSoyfqY+/guUyNtcoVaMt3XeUjmrOrPT9gVbdlU+MmfZCjed/tjsuU+lCd1q7hxbOXPq/O//E13KTX/7xa1LTElStIKbfuCl+ROj5pjuHwH6Wuh+I3VoAJfXeo9BjE2+SPf9F+n+OFtndbryauWyeXPWBIVufx8z8fPj0Ync8p0rF02K2pnu48xmAuznorkq+v83V8X8OEllXWNS1KIsAhjm8BEqaecOf6Gdrdz9cvWevRs37ubiAqdwsupU4BftQ9rpl13ncZoq8Bo6TaOes1obJYiwN4ylQ4kBa6T6ZuyCWApJQCwAybrtcC5WJGyOaWRO5xpgGrt0AabxGJxrxDSJtCWmKXV22cRAzdRNXdqtmrZ63fqq6c9ka6PELzYOK4lhmttvin7IbRtadmK/7wMq3DtC9/Gj+A+M/d9pZOm4/yYfnwKZg63gAgwA4kaY29K/IxW2RixglplbbwULFGGJs3UsMLm6S9zYiqINkxgWKH+2fbtn7m3EAnfcvuZsNpc/6FbEAj+V/pVzD52infsw5q+554EOF+RcTd5R76vHxYGKyI2tBsizcNrHjf4jjsTuWQAO+3TLMuUwxbzHWVA10Z/ncA2d8kS60K02bky5SSiX5k6O+mC9SYA9VsN6Hci8S9SL6GXrRaT1epHPD7gKC0YOI+80p8vuWjFODuI0mJIlKwmx+hFx+BpH0HUXHBtBb71+xMr1RZ0Bz5vUygVPz16377WPN78yvoyb/My8Bx6Y8tIbe7+sfbN8PKXtpPvGTb35xqmZuQ/NmbVp2O3zAd4PXTjlxv4lWXlPzVtcPXLoDInxPPv8T9wUcRDgl9tIxIM8iItBF1GHLqbm0CXWYYpvHC6Nt7SELtgMRHBAZMWpAxhZnwdrhruyC+Xs16f//POA3qlFme602/OmzgX4Qn3aTyXRq8YNFaWhdsfjz3FvwP5Wgow+F7rpfgwtUy+3SmZjk1iE8l5QhFLsrDDJ/BirQ8msKoklFSqx2kqzqlRRI6rNXlm5eNaStRmV46ydlcpN++hb3L3RZW9unjGe5869qd55N8aN9uBX98N+mtWl6JXrUu1n0dyglE2zZ2mlo4RuDZ/NncvnnXsTvno1IeIBuJ6PfGPMHjmcEIfwojXUhH2GVktT3sbS1L6bfj7dSmnqtxPvtihNWUS9NNXzvVND9XmEOEiD94qKHSead+7bd/IelsuaXDVmkwVy2cbSFfzZLJeFc5jLbufMFptew4J8treVM8HfjmaVLCO51YtYBjc8wI3Yq1FcCF4961A7Kfz93d93ljocnKUdLPulQOp44m6hWzTrjTe4L6NZb77JfXnuTe74669HU4ArIeB/LfCrZd2K/nd1qxCdqz3xCA3SrEe1J+ich7X3tPe4HM6jXUt3Rk9Gj9D3tTCsEQTMfIjJxJiVh2tjh9UeVmVEyfEFyHwgTW4uaJAz0yID4F5Fg4tou2yJXveglpv74HxfD4cjrjBu4MhAMSjAT/P5p88lTlppEcdw4uS/Lme2iDc3bGG61aKehU6IN/139axh3MPRJbwzOoXbM4SfeffQhoVGPauvNoFbKfUkaeRGAuZc63eQRCGPzQhBbLMU1JrZCTajk8wwKHYvIM3NYJT6gZ8ebPpTGY3b4lZFux4OWABjdo23gsQK+ya9rt/3/imrXkmae9/wO+4YXjEv9ZVVU7j0sQ/OPL7pVNGgdoceOz5pbVbOuonHHjuYe1PRyZePzVjK9hrRfqV+ViNLIS1bpa569mOUy8ByI6Xar9LuM33Y9yxA450xGtMKaolOo79AjQcaHQW1ziYa+TrFqvep3QaNfhIbbIjHqKc43KrVzWjsRRmJOkkoXpbH+1g+L5kscytH3nXXyPvmJu14rryionzVK9qu3IOPHStfmxlcO+X44++0G1R0atPxGYvHLp1x7OWTRbo8HqPVQj3vIYnkJoLo3GKtR73iUb+SGLHGXWnM3IHmZCyuJyKIZJNQFuylk0S2W1XywG8eQrTdmCbEEKjHE7+edLHk0fdY1cy/Pjn0qvHFAyaUrJ0+5IkhvSd2HXQP/eKBHTfcWByeV+Kcv+u6QV0Kp4/R9zjjvI3/TswmQTJDr5UoaWE1XqyPBJj7D2QY5RK8OcEJpwWWUQniRRWTDL1vns6yGoyWRgklSa5HKWAJJT0D6MEyl15CqbHaEpP1yFjY2d3yfqymKko8uyUrm5vxwd8rq97l+cYyynhO+MdTlbvf58y5R2hOwldfyu+tblZIWbrP/d1xP80BGvH+wo7sXqJn9fuI1FRIlxJDEQnTeAdfX0toimTPU9xhVn/1hmpsKZIZKAyy+1Nk7DwzdMATnLfgUyzoOxUfYoM2QHCbAoULs5QfFC0ePh3fhgVML346Ppl9Wkfe7no1E6ck0KoTEXmrksMAvWGeybTxjjScKQbJmnBmPtyLFuZc867tH5HXd/F8+dLK2U/Y6D7talM4n6cNg63XXmviFpTRtu/Vf7hV+ttSZY12uEwZv693aanz+0ol1kNaDvYWjxUCR7M6fa1LdhA7G4BzIYIM1Xp97ARAAy+vQwM/wiGkzc7GHSN2NppgtwFhUijiYJmfwwV/eUMMKtsdsVq/r0WtH0jx6bUNcGX4r8MyWk03LtOK6b3acPqiNrxCv8GQThWVaAfu06hctq1M20mvhV86jl8revgs437XHiTWNVeJnWEWvS/WOOeJVeYErNizRjqWzOGvxn5YGBnrW7uVtt0ielbDf1jhHn/+J/EP8QDEHj8g1FV6/FedDmPa0QcHmQwx4gGrvGWCidSG8yyZkAiH4WxemN3wWIAW0oXtIs5F8vTRxwT9Zj2lrUvN18dqO8Jf6SGlowtxbq3EPqkW4e19bWX3DovTx2emhPXx7TzZvV2Kc6eTjrrR6C1kvQnf7NiYMW7NksBLjKdVtC3NoVXaaO0L7bBWchudSAVK6WRtuaZpDdqTNGnHM09uELjhk8ZNmjVz8vgJwznhxSef2cEdod2pot2kHdQOaANphPbQ6rW5dD71Ux/E3PnatorNn1c9JU2ZVD2/cuGLE6ZJT1d9xmQ2k6zle/ObiASZIU65YqA2fs2kOfdoJ6j3HkfsgEv10JnaTG0WnWkcXHB/EWlx9xCoNSkDmf1qyCxEuuNM50VSqwWQgPPNe
NdlJyahToD0lbah2sTu7I3ExvstL5BXCCQUDikhFxNLu/YA/FPBVwfbhkJKagux4S2YRSHIA1BsGXh7oTsV9D8HhNcJpwKDxUpYrgUREnxT6Y43GFxGjpfoo+fRRBq7naTMkOYakOYRXZqTIAPj6CQmzai2HKTLPVn1l759e5gtZVbhxqG7tg8aP+Le568kzehA/pY5M/relZY4rn/Xtn18Lt/NuV1uvUF7ju65+frb9L7xNGEXPSK+CRJor1tiLblEj0flMfByen6fTMN+ftqHT/Jn4PtWSWvAa5VoA+hKuKoTpz5MDP7H1SvOWIBnd6uY6motumgsLpU37s5m96dIRL8P2CTrFVU9ySoKG/OWJcNmDh6bekfcoNFVT2qrenYv7mCe29syaPDwiUw/F4B+DojpZxE6Kh/Dk/BrAfVqJ+6hOdqRTxqP1tKFdJG2yKMtajzQ50vZHKspnc2xui47ySoX6Gltq5OsvAf4c9E4axEyrPlMKyU68/SZmaGwLq56xclF+UqTi+6LJhcpbqjZ+GL0XX0vxhCj5DOkiLw8BC8FsBeBmEkWiYgYaSQG7ywFiljHCj7YDjaLLKE31MFGAecdwqveUWlc7sxPxoAcr88tmTqzulIG6dnq5FKgtcpSm9g90YKN3RN9heElRuelJ5joZNzgFeeYuC90dgjGvpONe7+DpKyVnWNJLCOspkL8CoRikMogIwVcS7oewdIZwKoN6n8Fm0hEXJWRjiTKCbYrkxiLepemcjbGwysSyeezgMnpsyMgbxmQRffWpkf8rU2PJBhZe8Tp9hUXtz5BwqTRcozkLRTARcMkYodG/eON/YA/gMwukZRcvCMcZ4kPqx5gOD4dIqn59tCX+3QW+9ica22i/ldi09YRo8djrcwpXWLjMR632PtnyNaLtz4/hjtYv1v8GvQbrI/8j37Xl+IP6zO6mdb6iKux490uzRXreHdi2w/A9gMXd7wDLtxtREjKwY435nq+kBq6oOOdkC8oSXtF1Y8db1+zjrfPVRPv8+uPpEhMSvBgB8vfrEoA51jH2xefmKR3vP0J8YmNHe+A0fFOtgFscaVltu+AsEXxymp+AWt+411C3mSj+W33tNL8zr5s55uFkWbtb6m+ttX29x9MaZp64NP3tNYA52+OKRGv9ytBFtivzCQjrtSxzGqtY5ltdCy3Y8cyI/i/7VkyIi/XuDzHqLtk95K+0sw3PwuBVhPfbumb6X/lm5/VfbOwm13uXB/sT5HYcxoSxKMX+uYWVf/L+2bjeRVXKPwzb9B69Z+2ZX75cj0AbkPMJ+v7PdDok8c223EqeohAGO9tUjJCzQj4v/HKlyYu5jFap68L88iXJe+s7kbw/jespYKMPSQB51YvUU1NvEQ1NSnml2WvHwzyv6qoMslcWFa9k6nlRcVV/iddDryxT5x594MkFly4Ux+KIhEyUDuO6TRtPCW28RovT/A24cYEr4mKmuQ4C7yVoL+VUFCbrOd92GdKwCKXLOm3J1yRtJhcLqBuIvPlFxEn9GZSiMX9UUzHAiSHXN8qYmnbmlW0M6xiByKWNsFsfYRYzcy64uQ18xTBInilwUtH91/qFvG/l/1KzU9w2uEpVw7zNiqCvCQq6E7EsB/JcjFtLSz+8rShxbdC26XtozltrdvISy3puqyxfN6Sphhm6A+YwU9ScSb/YhST1hqKSTesZTugmITEFKQnTlaTki8HaAwqWuKa61vs/mKUMLL5jpntCFbxNMHKYjr2dC5h5RmXsPKAse9asPKkNGPbDtz25c2huRguMIlvW1JwsW2ktGA6Jc8Lx7l3xTqIRHns2Scie76YLOjBCJJH0UvMYLTWWKlfv3eosCgMiXCO6fnvSr4vr94gHPcd/dbNxiTA920SltKz4iesDnAjwYK3XgxWfAW1vJFGJsQy/CQ9wzfSd3wmDoZudxz4BwuPrPBByg6JZVO11dfsKUh6dN5017V9S0b3u65kYGF2VjiclV0otu83Gk6MGHFdTudw27aFXZDWMuEUdx5ipAd3BdhMEtmwBi/G+vO1Hj2t9TAx1Vr1cgJrbeHUGc9G59i8EClWeZeRM+q7aioAI2gqmzD46vWF+X1umnTLDSu7FPQW6e33Tbq+yDtk2qRru1y+jvK/f+9FbqvwHST7PPCddRv4en2ItmnqFb7yotCL21qG87FLuK3i3it+fonY1fj8cCFEZfZco8Zn1MSeakTY4Dt7Ro2o3x7Dvu0J877hk6+7SghtpV21t7fq+7zMdS7zrJvhV1VMhi923FGjvW9c53wHKlH+v76Onz3+bnjnijGfUut7+zS8LwP2wpmNZ+z1YRZw0RP2dNoU0cUqKDbjLiCDTEWS2egGu+k0RnK4kfB5zYg3WKCvab/8msYt7bHH+RlrGqRgeUUqVqzslqiWz/ZDJm1vxiiDXTgT0oX+Qd3/V2vqrDTWDFeO2di5cswhmrN9m/YpfAde0Z/jPS93s+cJYSWmn1EREczhMD4KQBUtoVCzpwvFxZ4uZJSJ8UkHism4w87beBegAQXwZ9dSKi8l55euZ//pOjGBrKUNrIYUIFQxxVyYTZ8XN8cEJ+jCYrXPCReVPOE6pXCd31teR+FCxqWarkPxOkapqrSVyhTb002Asd4TD4KHhXwyBwnOMB6dptjCqszjhGItoTlWO8Na2PpIxmcpshP4GEUeM8YaR44VeyHtC5TcOpWTsP4JMvImABdTc7F+lIodjvhQJJc9zSWXWLAThLVRlGOHZg9pseNDWuzGQ1p+nfzGNL197WAPabFjr3rn6bq951j6aXPVxEFamKe4XDVOlwPST/izWfoJ5zD9hICGqactzulq1o/OYNVWfbQyiOOV5ILxSvavecbVk9700ksvUedXxZN7W7pM6br5bS4YPYo/724qLu9s6XJf96+0U5yvbGNZ1mkadDnHuTw/vpUDf3rePCHLY50u2uZ3jx6HRvHPCNew+3X8pFKvjELOh0+w1MMR3/iAL3zWjtnpgfScRSapzng+W+t38qArAA2o9evRy+/C2bpaZ1P0ciG6tdoNPBVgD+iB7M0D/+Aohw/yJnkUnbfiBtpx5CZp65C/SM+HX5TE8f36ae3pP7T2XKI2lFZHf6BzqTaPPka1qUyPEPh1Zc/UIJ3kgIzH597+f+LPPhMAAHjaY2BkYGAAYqY1CuLx/DZfGeQ5GEDgHDPraRj9v/efIdsr9gQgl4OBCSQKAP2qCgwAAAB42mNgZGDgSPq7Fkgy/O/9f4rtFQNQBAUsBACcywcFAHjaNZJNSFRRGIafc853Z2rTohZu+lGiAknINv1trKZFP0ZWmxorNf8ycVqMkDpQlJQLIxCCEjWzRCmScBEExmyCpEXRrqBlizLJKGpr771Ni4f3fOec7573e7l+kcwKwP0s8ZYxf4Qr9of9luNytECXLZJ19eT9VQb9IKtDC+usn8NugBP+ENXuK1OhivX2mJvqmRM50S4OiBlxV9SKZnHKzTLsntNhZdrr445tohAmqEsfpdeWKbffFKMK+qMaijYiRlX3MBRNU/SVfLQ2jkdr
tb+DYmpJZzOiiYL9kp6nEGXk4Z3eeklVdJYpW6I8Xcku+8Ie+0SFzXPOfeNh2MI2KeEktSGP8wc5Y7W0WZ5ReWqU5mwD9f4B+6xb6zxj7j1P3eflW+E79+N1ukyzaV9kkz71+Beq19Dlp9msejgssDW1ir3S7WKjOO0fkXGvmJWujHq5HWdvWc0/pNxfUxWKTKRauBgm6YszTnXQ6mvI615TGOdaktNIksebePYEzZrMG88g326eeyVfMcMxSU6qk3uxt0uMy8OTUKA1PIN0g/Ioqe/W//BB7P4Hi9IeabvO5Ok/0Q0mU9cZcJ36T2IayfpmcUHU6a0K5uI+30inaIm/adUcsx802E74C0holcIAAAB42mNgYNCBwjCGPsYCxj9MM5iNmMOYW5g3sXCx+LAUsPSxrGM5xirE6sC6hM2ErYFdjL2NfR+HA8cWjjucPJwqnG6ccZzHuPq4DnHrcE/ivsTDx+PCs4PnAy8fbxDvBN5tfGx8TnxT+G7w2/AvEZAT8BPoEtgkaCWYIzhH8JTgNyEeIRuhOKEKoRnCQcLbRKRE6kTuieqJrhH9IiYnFie2QGyXuJZ4kfgBCQWJFok9knaSfZLXJP9JTZM6Ic0ibSTdIb1E+peMDxDuk3WQXSJ7Ra5OboHcOvks+Qny5+Q/KegplCjMU/ilmKO4RUlA6Zqyk3KO8hEVE5UOlW+qKarn1NTUOtQ2qf1Td8EBg9QT1PPU29TnqR9Sf6bBoeGkUaOxTeODxgdNEU0rIPymFaeVBQDd1FqqAAAAAQAAAKEARAAFAAAAAAACAAEAAgAWAAABAAFRAAAAAHjadVLLSsNQED1Jq9IaRYuULoMLV22aVhGJIBVfWIoLLRbETfqyxT4kjYh7P8OvcVV/QvwUT26mNSlKuJMzcydnzswEQAZfSEBLpgAc8YRYg0EvxDrSqApOwEZdcBI5vAleQh7vgpcZnwpeQQXfglMwNFPwKra0vGADO1pF8Bruta7gddS1D8EbMPSs4E2k9W3BGeT0Gc8UWf1U8Cds/Q7nGGMEHybacPl2iVqMPeEVHvp4QE/dXjA2pjdAh16ZPZZorxlr8vg8tXn2LNdhZjTDjOQ4wmLj4N+cW9byMKEfaDRZ0eKxVe092sO5kt0YRyHCEefuk81UPfpkdtlzB0O+PTwyNkZ3oVMr5sVvgikNccIqnuL1aV2lM6wZaPcZD7QHelqMjOh3WNXEM3Fb5QRaemqqx5y6y7zQi3+TZ2RxHmWqsFWXPr90UOTzoh6LPL9cFvM96i5SeZRzwkgNl+zhDFe4oS0I5997/W9PDXI1ObvZn1RSHA3ptMpeBypq0wb7drivfdoy8XyDP0JQfA542m3Ou0+TcRTG8e+hpTcol9JSoCqKIiqI71taCqJCtS3ekIsWARVoUmxrgDaFd2hiTEx0AXVkZ1Q3Edlw0cHEwcEBBv1XlNLfAAnP8slzknNyKGM//56R5Kisg5SJCRNmyrFgxYYdBxVU4qSKamqoxUUdbjzU46WBRprwcYzjnKCZk5yihdOcoZWztHGO81ygnQ4u0sklNHT8dBEgSDcheujlMn1c4SrX6GeAMNe5QYQoMQa5yS1uc4e7DHGPYUYYZYz7PCDOOA+ZYJIpHvGYJ0wzwywJMfOK16zxjlXeSzkrvOUvH/jBHD/5RYrfpMmQY5kCz3nBS7GIVWxiZ4c/7IpDKqRSnFIl1VIjteKSOnGLR+rFyyc2+MIW3/jMJt/5KA1s81UapYk34rOk5gu5tG41FjOapkVKhjVlxDmcNhZTibyxMJ8wlp3ZQy1+qBkHW3Hfv3dQqSv9yi5lQBlUditDyh5lrzJcUld3dd3xNJMy8nPJxFK6NPLHSgZj5qiRzxZLdO+P/+/adfZ42j3OKRLCQBAF0Bkm+0JWE0Ex6LkCksTEUKikiuIGWCwYcHABOEQHReE5BYcJHWjG9fst/n/w/gj8zGpwlk3H+aXtKks1M4jbGvIVHod2ApZaNwyELEGoBRiyvItipL4wEcaUYMnyyUy+ZWQbn9ab4CDsF8FFODeCh3CvBB/hnQgBwq8IISL4V40RofyBQ0TTUkwj7OhEtUMmyHSjGSOTuWY2rI32PdNJPiQZL3TSQq4+STRSagAAAAFR3VVMAAA=) format('woff'); +} \ No newline at end of file diff --git a/plugins/UiConfig/media/img/loading.gif b/plugins/UiConfig/media/img/loading.gif new file mode 100644 index 00000000..27d0aa81 Binary files /dev/null and b/plugins/UiConfig/media/img/loading.gif differ diff --git a/plugins/UiConfig/media/js/ConfigStorage.coffee b/plugins/UiConfig/media/js/ConfigStorage.coffee new file mode 100644 index 00000000..654f0363 --- /dev/null +++ b/plugins/UiConfig/media/js/ConfigStorage.coffee @@ -0,0 +1,157 @@ +class ConfigStorage extends Class + constructor: (@config) -> + @items = [] + @createSections() + @setValues(@config) + + setValues: (values) -> + for section in @items + for item in section.items + if not values[item.key] + continue + item.value = @formatValue(values[item.key].value) + item.default = @formatValue(values[item.key].default) + item.pending = values[item.key].pending + values[item.key].item = item + + formatValue: (value) -> + if not value + return false + else if typeof(value) == "object" + return value.join("\n") + else if typeof(value) == "number" + return value.toString() + else + return value + + deformatValue: (value, type) -> + if type == "object" and typeof(value) == "string" + if not value.length + return value = null + else + return value.split("\n") + if type == "boolean" and not value + return false + else + return value + + createSections: -> + # Web Interface + 
section = @createSection("Web Interface") + + section.items.push + key: "open_browser" + title: "Open web browser on ZeroNet startup" + type: "checkbox" + + # Network + section = @createSection("Network") + section.items.push + key: "offline" + title: "Offline mode" + type: "checkbox" + description: "Disable network communication." + + section.items.push + key: "fileserver_ip_type" + title: "File server network" + type: "select" + options: [ + {title: "IPv4", value: "ipv4"} + {title: "IPv6", value: "ipv6"} + {title: "Dual (IPv4 & IPv6)", value: "dual"} + ] + description: "Accept incoming peers using IPv4 or IPv6 address. (default: dual)" + + section.items.push + key: "fileserver_port" + title: "File server port" + type: "text" + valid_pattern: /[0-9]*/ + description: "Other peers will use this port to reach your served sites. (default: 15441)" + + section.items.push + key: "ip_external" + title: "File server external ip" + type: "textarea" + placeholder: "Detect automatically" + description: "Your file server is accessible on these ips. (default: detect automatically)" + + section.items.push + title: "Tor" + key: "tor" + type: "select" + options: [ + {title: "Disable", value: "disable"} + {title: "Enable", value: "enable"} + {title: "Always", value: "always"} + ] + description: [ + "Disable: Don't connect to peers on Tor network", h("br"), + "Enable: Only use Tor for Tor network peers", h("br"), + "Always: Use Tor for every connections to hide your IP address (slower)" + ] + + section.items.push + title: "Use Tor bridges" + key: "tor_use_bridges" + type: "checkbox" + description: "Use obfuscated bridge relays to avoid network level Tor block (even slower)" + isHidden: -> + return not Page.server_info.tor_has_meek_bridges + + section.items.push + title: "Trackers" + key: "trackers" + type: "textarea" + description: "Discover new peers using these adresses" + + section.items.push + title: "Trackers files" + key: "trackers_file" + type: "text" + description: "Load additional list of torrent trackers dynamically, from a file" + placeholder: "Eg.: data/trackers.json" + value_pos: "fullwidth" + + section.items.push + title: "Proxy for tracker connections" + key: "trackers_proxy" + type: "select" + options: [ + {title: "Custom", value: ""} + {title: "Tor", value: "tor"} + {title: "Disable", value: "disable"} + ] + + section.items.push + title: "Custom socks proxy address for trackers" + key: "trackers_proxy" + type: "text" + placeholder: "Eg.: 127.0.0.1:1080" + value_pos: "fullwidth" + valid_pattern: /.+:[0-9]+/ + isHidden: => + Page.values["trackers_proxy"] in ["tor", "disable"] + + # Performance + section = @createSection("Performance") + + section.items.push + key: "log_level" + title: "Level of logging to file" + type: "select" + options: [ + {title: "Everything", value: "DEBUG"} + {title: "Only important messages", value: "INFO"} + {title: "Only errors", value: "ERROR"} + ] + + createSection: (title) => + section = {} + section.title = title + section.items = [] + @items.push(section) + return section + +window.ConfigStorage = ConfigStorage \ No newline at end of file diff --git a/plugins/UiConfig/media/js/ConfigView.coffee b/plugins/UiConfig/media/js/ConfigView.coffee new file mode 100644 index 00000000..a110a17d --- /dev/null +++ b/plugins/UiConfig/media/js/ConfigView.coffee @@ -0,0 +1,124 @@ +class ConfigView extends Class + constructor: () -> + @ + + render: -> + @config_storage.items.map @renderSection + + renderSection: (section) => + h("div.section", {key: section.title}, [ + h("h2", 
section.title), + h("div.config-items", section.items.map @renderSectionItem) + ]) + + handleResetClick: (e) => + node = e.currentTarget + config_key = node.attributes.config_key.value + default_value = node.attributes.default_value?.value + Page.cmd "wrapperConfirm", ["Reset #{config_key} value?", "Reset to default"], (res) => + if (res) + @values[config_key] = default_value + Page.projector.scheduleRender() + + renderSectionItem: (item) => + value_pos = item.value_pos + + if item.type == "textarea" + value_pos ?= "fullwidth" + else + value_pos ?= "right" + + value_changed = @config_storage.formatValue(@values[item.key]) != item.value + value_default = @config_storage.formatValue(@values[item.key]) == item.default + + if item.key in ["open_browser", "fileserver_port"] # Value default for some settings makes no sense + value_default = true + + marker_title = "Changed from default value: #{item.default} -> #{@values[item.key]}" + if item.pending + marker_title += " (change pending until client restart)" + + if item.isHidden?() + return null + + h("div.config-item", {key: item.title, enterAnimation: Animation.slideDown, exitAnimation: Animation.slideUpInout}, [ + h("div.title", [ + h("h3", item.title), + h("div.description", item.description) + ]) + h("div.value.value-#{value_pos}", + if item.type == "select" + @renderValueSelect(item) + else if item.type == "checkbox" + @renderValueCheckbox(item) + else if item.type == "textarea" + @renderValueTextarea(item) + else + @renderValueText(item) + h("a.marker", { + href: "#Reset", title: marker_title, + onclick: @handleResetClick, config_key: item.key, default_value: item.default, + classes: {default: value_default, changed: value_changed, visible: not value_default or value_changed or item.pending, pending: item.pending} + }, "\u2022") + ) + ]) + + # Values + handleInputChange: (e) => + node = e.target + config_key = node.attributes.config_key.value + @values[config_key] = node.value + Page.projector.scheduleRender() + + handleCheckboxChange: (e) => + node = e.currentTarget + config_key = node.attributes.config_key.value + value = not node.classList.contains("checked") + @values[config_key] = value + Page.projector.scheduleRender() + + renderValueText: (item) => + value = @values[item.key] + if not value + value = "" + h("input.input-#{item.type}", {type: item.type, config_key: item.key, value: value, placeholder: item.placeholder, oninput: @handleInputChange}) + + autosizeTextarea: (e) => + if e.currentTarget + # @handleInputChange(e) + node = e.currentTarget + else + node = e + height_before = node.style.height + if height_before + node.style.height = "0px" + h = node.offsetHeight + scrollh = node.scrollHeight + 20 + if scrollh > h + node.style.height = scrollh + "px" + else + node.style.height = height_before + + renderValueTextarea: (item) => + value = @values[item.key] + if not value + value = "" + h("textarea.input-#{item.type}.input-text",{ + type: item.type, config_key: item.key, oninput: @handleInputChange, afterCreate: @autosizeTextarea, + updateAnimation: @autosizeTextarea, value: value, placeholder: item.placeholder + }) + + renderValueCheckbox: (item) => + if @values[item.key] and @values[item.key] != "False" + checked = true + else + checked = false + h("div.checkbox", {onclick: @handleCheckboxChange, config_key: item.key, classes: {checked: checked}}, h("div.checkbox-skin")) + + renderValueSelect: (item) => + h("select.input-select", {config_key: item.key, oninput: @handleInputChange}, + item.options.map (option) => + h("option", 
{selected: option.value == @values[item.key], value: option.value}, option.title) + ) + +window.ConfigView = ConfigView \ No newline at end of file diff --git a/plugins/UiConfig/media/js/UiConfig.coffee b/plugins/UiConfig/media/js/UiConfig.coffee new file mode 100644 index 00000000..4ee3a1c6 --- /dev/null +++ b/plugins/UiConfig/media/js/UiConfig.coffee @@ -0,0 +1,127 @@ +window.h = maquette.h + +class UiConfig extends ZeroFrame + init: -> + @save_visible = true + @config = null # Settings currently set on the server + @values = null # Values entered on the page + @config_view = new ConfigView() + window.onbeforeunload = => + if @getValuesChanged().length > 0 + return true + else + return null + + onOpenWebsocket: => + @cmd("wrapperSetTitle", "Config - ZeroNet") + @cmd "serverInfo", {}, (server_info) => + @server_info = server_info + @restart_loading = false + @updateConfig() + + updateConfig: (cb) => + @cmd "configList", [], (res) => + @config = res + @values = {} + @config_storage = new ConfigStorage(@config) + @config_view.values = @values + @config_view.config_storage = @config_storage + for key, item of res + value = item.value + @values[key] = @config_storage.formatValue(value) + @projector.scheduleRender() + cb?() + + createProjector: => + @projector = maquette.createProjector() + @projector.replace($("#content"), @render) + @projector.replace($("#bottom-save"), @renderBottomSave) + @projector.replace($("#bottom-restart"), @renderBottomRestart) + + getValuesChanged: => + values_changed = [] + for key, value of @values + if @config_storage.formatValue(value) != @config_storage.formatValue(@config[key]?.value) + values_changed.push({key: key, value: value}) + return values_changed + + getValuesPending: => + values_pending = [] + for key, item of @config + if item.pending + values_pending.push(key) + return values_pending + + saveValues: (cb) => + changed_values = @getValuesChanged() + for item, i in changed_values + last = i == changed_values.length - 1 + value = @config_storage.deformatValue(item.value, typeof(@config[item.key].default)) + value_same_as_default = JSON.stringify(@config[item.key].default) == JSON.stringify(value) + if value_same_as_default + value = null + + if @config[item.key].item.valid_pattern and not @config[item.key].item.isHidden?() + match = value.match(@config[item.key].item.valid_pattern) + if not match or match[0] != value + message = "Invalid value for #{@config[item.key].item.title}: #{value} (does not match #{@config[item.key].item.valid_pattern})" + Page.cmd("wrapperNotification", ["error", message]) + cb(false) + break + + @saveValue(item.key, value, if last then cb else null) + + saveValue: (key, value, cb) => + if key == "open_browser" + if value + value = "default_browser" + else + value = "False" + + Page.cmd "configSet", [key, value], (res) => + if res != "ok" + Page.cmd "wrapperNotification", ["error", res.error] + cb?(true) + + render: => + if not @config + return h("div.content") + + h("div.content", [ + @config_view.render() + ]) + + handleSaveClick: => + @save_loading = true + @logStart "Save" + @saveValues (success) => + @save_loading = false + @logEnd "Save" + if success + @updateConfig() + Page.projector.scheduleRender() + return false + + renderBottomSave: => + values_changed = @getValuesChanged() + h("div.bottom.bottom-save", {classes: {visible: values_changed.length}}, h("div.bottom-content", [ + h("div.title", "#{values_changed.length} configuration value(s) changed"), + h("a.button.button-submit.button-save", {href: "#Save", classes: 
{loading: @save_loading}, onclick: @handleSaveClick}, "Save settings") + ])) + + handleRestartClick: => + @restart_loading = true + Page.cmd("serverShutdown", {restart: true}) + Page.projector.scheduleRender() + return false + + renderBottomRestart: => + values_pending = @getValuesPending() + values_changed = @getValuesChanged() + h("div.bottom.bottom-restart", {classes: {visible: values_pending.length and not values_changed.length}}, h("div.bottom-content", [ + h("div.title", "Some changed settings require a restart"), + h("a.button.button-submit.button-restart", {href: "#Restart", classes: {loading: @restart_loading}, onclick: @handleRestartClick}, "Restart ZeroNet client") + ])) + +window.Page = new UiConfig() +window.Page.createProjector() diff --git a/plugins/UiConfig/media/js/all.js b/plugins/UiConfig/media/js/all.js new file mode 100644 index 00000000..99c3a6d8 --- /dev/null +++ b/plugins/UiConfig/media/js/all.js @@ -0,0 +1,1945 @@ + +/* ---- plugins/UiConfig/media/js/lib/Class.coffee ---- */ + + +(function() { + var Class, + slice = [].slice; + + Class = (function() { + function Class() {} + + Class.prototype.trace = true; + + Class.prototype.log = function() { + var args; + args = 1 <= arguments.length ? slice.call(arguments, 0) : []; + if (!this.trace) { + return; + } + if (typeof console === 'undefined') { + return; + } + args.unshift("[" + this.constructor.name + "]"); + console.log.apply(console, args); + return this; + }; + + Class.prototype.logStart = function() { + var args, name; + name = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; + if (!this.trace) { + return; + } + this.logtimers || (this.logtimers = {}); + this.logtimers[name] = +(new Date); + if (args.length > 0) { + this.log.apply(this, ["" + name].concat(slice.call(args), ["(started)"])); + } + return this; + }; + + Class.prototype.logEnd = function() { + var args, ms, name; + name = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; + ms = +(new Date) - this.logtimers[name]; + this.log.apply(this, ["" + name].concat(slice.call(args), ["(Done in " + ms + "ms)"])); + return this; + }; + + return Class; + + })(); + + window.Class = Class; + +}).call(this); + + +/* ---- plugins/UiConfig/media/js/lib/Promise.coffee ---- */ + + +(function() { + var Promise, + slice = [].slice; + + Promise = (function() { + Promise.when = function() { + var args, fn, i, len, num_uncompleted, promise, task, task_id, tasks; + tasks = 1 <= arguments.length ? 
slice.call(arguments, 0) : []; + num_uncompleted = tasks.length; + args = new Array(num_uncompleted); + promise = new Promise(); + fn = function(task_id) { + return task.then(function() { + args[task_id] = Array.prototype.slice.call(arguments); + num_uncompleted--; + if (num_uncompleted === 0) { + return promise.complete.apply(promise, args); + } + }); + }; + for (task_id = i = 0, len = tasks.length; i < len; task_id = ++i) { + task = tasks[task_id]; + fn(task_id); + } + return promise; + }; + + function Promise() { + this.resolved = false; + this.end_promise = null; + this.result = null; + this.callbacks = []; + } + + Promise.prototype.resolve = function() { + var back, callback, i, len, ref; + if (this.resolved) { + return false; + } + this.resolved = true; + this.data = arguments; + if (!arguments.length) { + this.data = [true]; + } + this.result = this.data[0]; + ref = this.callbacks; + for (i = 0, len = ref.length; i < len; i++) { + callback = ref[i]; + back = callback.apply(callback, this.data); + } + if (this.end_promise) { + return this.end_promise.resolve(back); + } + }; + + Promise.prototype.fail = function() { + return this.resolve(false); + }; + + Promise.prototype.then = function(callback) { + if (this.resolved === true) { + callback.apply(callback, this.data); + return; + } + this.callbacks.push(callback); + return this.end_promise = new Promise(); + }; + + return Promise; + + })(); + + window.Promise = Promise; + + + /* + s = Date.now() + log = (text) -> + console.log Date.now()-s, Array.prototype.slice.call(arguments).join(", ") + + log "Started" + + cmd = (query) -> + p = new Promise() + setTimeout ( -> + p.resolve query+" Result" + ), 100 + return p + + back = cmd("SELECT * FROM message").then (res) -> + log res + return "Return from query" + .then (res) -> + log "Back then", res + + log "Query started", back + */ + +}).call(this); + + +/* ---- plugins/UiConfig/media/js/lib/Prototypes.coffee ---- */ + + +(function() { + String.prototype.startsWith = function(s) { + return this.slice(0, s.length) === s; + }; + + String.prototype.endsWith = function(s) { + return s === '' || this.slice(-s.length) === s; + }; + + String.prototype.repeat = function(count) { + return new Array(count + 1).join(this); + }; + + window.isEmpty = function(obj) { + var key; + for (key in obj) { + return false; + } + return true; + }; + +}).call(this); + + +/* ---- plugins/UiConfig/media/js/lib/maquette.js ---- */ + + +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. 
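+    // Note: in this bundle the browser-globals branch below (root.maquette) is the one that normally runs;
+    // UiConfig.coffee above relies on maquette.h and maquette.createProjector exposed that way.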
+ define(['exports'], factory); + } else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') { + // CommonJS + factory(exports); + } else { + // Browser globals + factory(root.maquette = {}); + } +}(this, function (exports) { + 'use strict'; + ; + ; + ; + ; + var NAMESPACE_W3 = 'http://www.w3.org/'; + var NAMESPACE_SVG = NAMESPACE_W3 + '2000/svg'; + var NAMESPACE_XLINK = NAMESPACE_W3 + '1999/xlink'; + // Utilities + var emptyArray = []; + var extend = function (base, overrides) { + var result = {}; + Object.keys(base).forEach(function (key) { + result[key] = base[key]; + }); + if (overrides) { + Object.keys(overrides).forEach(function (key) { + result[key] = overrides[key]; + }); + } + return result; + }; + // Hyperscript helper functions + var same = function (vnode1, vnode2) { + if (vnode1.vnodeSelector !== vnode2.vnodeSelector) { + return false; + } + if (vnode1.properties && vnode2.properties) { + if (vnode1.properties.key !== vnode2.properties.key) { + return false; + } + return vnode1.properties.bind === vnode2.properties.bind; + } + return !vnode1.properties && !vnode2.properties; + }; + var toTextVNode = function (data) { + return { + vnodeSelector: '', + properties: undefined, + children: undefined, + text: data.toString(), + domNode: null + }; + }; + var appendChildren = function (parentSelector, insertions, main) { + for (var i = 0; i < insertions.length; i++) { + var item = insertions[i]; + if (Array.isArray(item)) { + appendChildren(parentSelector, item, main); + } else { + if (item !== null && item !== undefined) { + if (!item.hasOwnProperty('vnodeSelector')) { + item = toTextVNode(item); + } + main.push(item); + } + } + } + }; + // Render helper functions + var missingTransition = function () { + throw new Error('Provide a transitions object to the projectionOptions to do animations'); + }; + var DEFAULT_PROJECTION_OPTIONS = { + namespace: undefined, + eventHandlerInterceptor: undefined, + styleApplyer: function (domNode, styleName, value) { + // Provides a hook to add vendor prefixes for browsers that still need it. 
+ domNode.style[styleName] = value; + }, + transitions: { + enter: missingTransition, + exit: missingTransition + } + }; + var applyDefaultProjectionOptions = function (projectorOptions) { + return extend(DEFAULT_PROJECTION_OPTIONS, projectorOptions); + }; + var checkStyleValue = function (styleValue) { + if (typeof styleValue !== 'string') { + throw new Error('Style values must be strings'); + } + }; + var setProperties = function (domNode, properties, projectionOptions) { + if (!properties) { + return; + } + var eventHandlerInterceptor = projectionOptions.eventHandlerInterceptor; + var propNames = Object.keys(properties); + var propCount = propNames.length; + for (var i = 0; i < propCount; i++) { + var propName = propNames[i]; + /* tslint:disable:no-var-keyword: edge case */ + var propValue = properties[propName]; + /* tslint:enable:no-var-keyword */ + if (propName === 'className') { + throw new Error('Property "className" is not supported, use "class".'); + } else if (propName === 'class') { + if (domNode.className) { + // May happen if classes is specified before class + domNode.className += ' ' + propValue; + } else { + domNode.className = propValue; + } + } else if (propName === 'classes') { + // object with string keys and boolean values + var classNames = Object.keys(propValue); + var classNameCount = classNames.length; + for (var j = 0; j < classNameCount; j++) { + var className = classNames[j]; + if (propValue[className]) { + domNode.classList.add(className); + } + } + } else if (propName === 'styles') { + // object with string keys and string (!) values + var styleNames = Object.keys(propValue); + var styleCount = styleNames.length; + for (var j = 0; j < styleCount; j++) { + var styleName = styleNames[j]; + var styleValue = propValue[styleName]; + if (styleValue) { + checkStyleValue(styleValue); + projectionOptions.styleApplyer(domNode, styleName, styleValue); + } + } + } else if (propName === 'key') { + continue; + } else if (propValue === null || propValue === undefined) { + continue; + } else { + var type = typeof propValue; + if (type === 'function') { + if (propName.lastIndexOf('on', 0) === 0) { + if (eventHandlerInterceptor) { + propValue = eventHandlerInterceptor(propName, propValue, domNode, properties); // intercept eventhandlers + } + if (propName === 'oninput') { + (function () { + // record the evt.target.value, because IE and Edge sometimes do a requestAnimationFrame between changing value and running oninput + var oldPropValue = propValue; + propValue = function (evt) { + evt.target['oninput-value'] = evt.target.value; + // may be HTMLTextAreaElement as well + oldPropValue.apply(this, [evt]); + }; + }()); + } + domNode[propName] = propValue; + } + } else if (type === 'string' && propName !== 'value' && propName !== 'innerHTML') { + if (projectionOptions.namespace === NAMESPACE_SVG && propName === 'href') { + domNode.setAttributeNS(NAMESPACE_XLINK, propName, propValue); + } else { + domNode.setAttribute(propName, propValue); + } + } else { + domNode[propName] = propValue; + } + } + } + }; + var updateProperties = function (domNode, previousProperties, properties, projectionOptions) { + if (!properties) { + return; + } + var propertiesUpdated = false; + var propNames = Object.keys(properties); + var propCount = propNames.length; + for (var i = 0; i < propCount; i++) { + var propName = propNames[i]; + // assuming that properties will be nullified instead of missing is by design + var propValue = properties[propName]; + var previousValue = previousProperties[propName]; 
+ if (propName === 'class') { + if (previousValue !== propValue) { + throw new Error('"class" property may not be updated. Use the "classes" property for conditional css classes.'); + } + } else if (propName === 'classes') { + var classList = domNode.classList; + var classNames = Object.keys(propValue); + var classNameCount = classNames.length; + for (var j = 0; j < classNameCount; j++) { + var className = classNames[j]; + var on = !!propValue[className]; + var previousOn = !!previousValue[className]; + if (on === previousOn) { + continue; + } + propertiesUpdated = true; + if (on) { + classList.add(className); + } else { + classList.remove(className); + } + } + } else if (propName === 'styles') { + var styleNames = Object.keys(propValue); + var styleCount = styleNames.length; + for (var j = 0; j < styleCount; j++) { + var styleName = styleNames[j]; + var newStyleValue = propValue[styleName]; + var oldStyleValue = previousValue[styleName]; + if (newStyleValue === oldStyleValue) { + continue; + } + propertiesUpdated = true; + if (newStyleValue) { + checkStyleValue(newStyleValue); + projectionOptions.styleApplyer(domNode, styleName, newStyleValue); + } else { + projectionOptions.styleApplyer(domNode, styleName, ''); + } + } + } else { + if (!propValue && typeof previousValue === 'string') { + propValue = ''; + } + if (propName === 'value') { + if (domNode[propName] !== propValue && domNode['oninput-value'] !== propValue) { + domNode[propName] = propValue; + // Reset the value, even if the virtual DOM did not change + domNode['oninput-value'] = undefined; + } + // else do not update the domNode, otherwise the cursor position would be changed + if (propValue !== previousValue) { + propertiesUpdated = true; + } + } else if (propValue !== previousValue) { + var type = typeof propValue; + if (type === 'function') { + throw new Error('Functions may not be updated on subsequent renders (property: ' + propName + '). 
Hint: declare event handler functions outside the render() function.'); + } + if (type === 'string' && propName !== 'innerHTML') { + if (projectionOptions.namespace === NAMESPACE_SVG && propName === 'href') { + domNode.setAttributeNS(NAMESPACE_XLINK, propName, propValue); + } else { + domNode.setAttribute(propName, propValue); + } + } else { + if (domNode[propName] !== propValue) { + domNode[propName] = propValue; + } + } + propertiesUpdated = true; + } + } + } + return propertiesUpdated; + }; + var findIndexOfChild = function (children, sameAs, start) { + if (sameAs.vnodeSelector !== '') { + // Never scan for text-nodes + for (var i = start; i < children.length; i++) { + if (same(children[i], sameAs)) { + return i; + } + } + } + return -1; + }; + var nodeAdded = function (vNode, transitions) { + if (vNode.properties) { + var enterAnimation = vNode.properties.enterAnimation; + if (enterAnimation) { + if (typeof enterAnimation === 'function') { + enterAnimation(vNode.domNode, vNode.properties); + } else { + transitions.enter(vNode.domNode, vNode.properties, enterAnimation); + } + } + } + }; + var nodeToRemove = function (vNode, transitions) { + var domNode = vNode.domNode; + if (vNode.properties) { + var exitAnimation = vNode.properties.exitAnimation; + if (exitAnimation) { + domNode.style.pointerEvents = 'none'; + var removeDomNode = function () { + if (domNode.parentNode) { + domNode.parentNode.removeChild(domNode); + } + }; + if (typeof exitAnimation === 'function') { + exitAnimation(domNode, removeDomNode, vNode.properties); + return; + } else { + transitions.exit(vNode.domNode, vNode.properties, exitAnimation, removeDomNode); + return; + } + } + } + if (domNode.parentNode) { + domNode.parentNode.removeChild(domNode); + } + }; + var checkDistinguishable = function (childNodes, indexToCheck, parentVNode, operation) { + var childNode = childNodes[indexToCheck]; + if (childNode.vnodeSelector === '') { + return; // Text nodes need not be distinguishable + } + var properties = childNode.properties; + var key = properties ? properties.key === undefined ? properties.bind : properties.key : undefined; + if (!key) { + for (var i = 0; i < childNodes.length; i++) { + if (i !== indexToCheck) { + var node = childNodes[i]; + if (same(node, childNode)) { + if (operation === 'added') { + throw new Error(parentVNode.vnodeSelector + ' had a ' + childNode.vnodeSelector + ' child ' + 'added, but there is now more than one. You must add unique key properties to make them distinguishable.'); + } else { + throw new Error(parentVNode.vnodeSelector + ' had a ' + childNode.vnodeSelector + ' child ' + 'removed, but there were more than one. You must add unique key properties to make them distinguishable.'); + } + } + } + } + } + }; + var createDom; + var updateDom; + var updateChildren = function (vnode, domNode, oldChildren, newChildren, projectionOptions) { + if (oldChildren === newChildren) { + return false; + } + oldChildren = oldChildren || emptyArray; + newChildren = newChildren || emptyArray; + var oldChildrenLength = oldChildren.length; + var newChildrenLength = newChildren.length; + var transitions = projectionOptions.transitions; + var oldIndex = 0; + var newIndex = 0; + var i; + var textUpdated = false; + while (newIndex < newChildrenLength) { + var oldChild = oldIndex < oldChildrenLength ? 
oldChildren[oldIndex] : undefined; + var newChild = newChildren[newIndex]; + if (oldChild !== undefined && same(oldChild, newChild)) { + textUpdated = updateDom(oldChild, newChild, projectionOptions) || textUpdated; + oldIndex++; + } else { + var findOldIndex = findIndexOfChild(oldChildren, newChild, oldIndex + 1); + if (findOldIndex >= 0) { + // Remove preceding missing children + for (i = oldIndex; i < findOldIndex; i++) { + nodeToRemove(oldChildren[i], transitions); + checkDistinguishable(oldChildren, i, vnode, 'removed'); + } + textUpdated = updateDom(oldChildren[findOldIndex], newChild, projectionOptions) || textUpdated; + oldIndex = findOldIndex + 1; + } else { + // New child + createDom(newChild, domNode, oldIndex < oldChildrenLength ? oldChildren[oldIndex].domNode : undefined, projectionOptions); + nodeAdded(newChild, transitions); + checkDistinguishable(newChildren, newIndex, vnode, 'added'); + } + } + newIndex++; + } + if (oldChildrenLength > oldIndex) { + // Remove child fragments + for (i = oldIndex; i < oldChildrenLength; i++) { + nodeToRemove(oldChildren[i], transitions); + checkDistinguishable(oldChildren, i, vnode, 'removed'); + } + } + return textUpdated; + }; + var addChildren = function (domNode, children, projectionOptions) { + if (!children) { + return; + } + for (var i = 0; i < children.length; i++) { + createDom(children[i], domNode, undefined, projectionOptions); + } + }; + var initPropertiesAndChildren = function (domNode, vnode, projectionOptions) { + addChildren(domNode, vnode.children, projectionOptions); + // children before properties, needed for value property of . + if (vnode.text) { + domNode.textContent = vnode.text; + } + setProperties(domNode, vnode.properties, projectionOptions); + if (vnode.properties && vnode.properties.afterCreate) { + vnode.properties.afterCreate(domNode, projectionOptions, vnode.vnodeSelector, vnode.properties, vnode.children); + } + }; + createDom = function (vnode, parentNode, insertBefore, projectionOptions) { + var domNode, i, c, start = 0, type, found; + var vnodeSelector = vnode.vnodeSelector; + if (vnodeSelector === '') { + domNode = vnode.domNode = document.createTextNode(vnode.text); + if (insertBefore !== undefined) { + parentNode.insertBefore(domNode, insertBefore); + } else { + parentNode.appendChild(domNode); + } + } else { + for (i = 0; i <= vnodeSelector.length; ++i) { + c = vnodeSelector.charAt(i); + if (i === vnodeSelector.length || c === '.' 
|| c === '#') { + type = vnodeSelector.charAt(start - 1); + found = vnodeSelector.slice(start, i); + if (type === '.') { + domNode.classList.add(found); + } else if (type === '#') { + domNode.id = found; + } else { + if (found === 'svg') { + projectionOptions = extend(projectionOptions, { namespace: NAMESPACE_SVG }); + } + if (projectionOptions.namespace !== undefined) { + domNode = vnode.domNode = document.createElementNS(projectionOptions.namespace, found); + } else { + domNode = vnode.domNode = document.createElement(found); + } + if (insertBefore !== undefined) { + parentNode.insertBefore(domNode, insertBefore); + } else { + parentNode.appendChild(domNode); + } + } + start = i + 1; + } + } + initPropertiesAndChildren(domNode, vnode, projectionOptions); + } + }; + updateDom = function (previous, vnode, projectionOptions) { + var domNode = previous.domNode; + var textUpdated = false; + if (previous === vnode) { + return false; // By contract, VNode objects may not be modified anymore after passing them to maquette + } + var updated = false; + if (vnode.vnodeSelector === '') { + if (vnode.text !== previous.text) { + var newVNode = document.createTextNode(vnode.text); + domNode.parentNode.replaceChild(newVNode, domNode); + vnode.domNode = newVNode; + textUpdated = true; + return textUpdated; + } + } else { + if (vnode.vnodeSelector.lastIndexOf('svg', 0) === 0) { + projectionOptions = extend(projectionOptions, { namespace: NAMESPACE_SVG }); + } + if (previous.text !== vnode.text) { + updated = true; + if (vnode.text === undefined) { + domNode.removeChild(domNode.firstChild); // the only textnode presumably + } else { + domNode.textContent = vnode.text; + } + } + updated = updateChildren(vnode, domNode, previous.children, vnode.children, projectionOptions) || updated; + updated = updateProperties(domNode, previous.properties, vnode.properties, projectionOptions) || updated; + if (vnode.properties && vnode.properties.afterUpdate) { + vnode.properties.afterUpdate(domNode, projectionOptions, vnode.vnodeSelector, vnode.properties, vnode.children); + } + } + if (updated && vnode.properties && vnode.properties.updateAnimation) { + vnode.properties.updateAnimation(domNode, vnode.properties, previous.properties); + } + vnode.domNode = previous.domNode; + return textUpdated; + }; + var createProjection = function (vnode, projectionOptions) { + return { + update: function (updatedVnode) { + if (vnode.vnodeSelector !== updatedVnode.vnodeSelector) { + throw new Error('The selector for the root VNode may not be changed. (consider using dom.merge and add one extra level to the virtual DOM)'); + } + updateDom(vnode, updatedVnode, projectionOptions); + vnode = updatedVnode; + }, + domNode: vnode.domNode + }; + }; + ; + // The other two parameters are not added here, because the Typescript compiler creates surrogate code for desctructuring 'children'. 
+ exports.h = function (selector) { + var properties = arguments[1]; + if (typeof selector !== 'string') { + throw new Error(); + } + var childIndex = 1; + if (properties && !properties.hasOwnProperty('vnodeSelector') && !Array.isArray(properties) && typeof properties === 'object') { + childIndex = 2; + } else { + // Optional properties argument was omitted + properties = undefined; + } + var text = undefined; + var children = undefined; + var argsLength = arguments.length; + // Recognize a common special case where there is only a single text node + if (argsLength === childIndex + 1) { + var onlyChild = arguments[childIndex]; + if (typeof onlyChild === 'string') { + text = onlyChild; + } else if (onlyChild !== undefined && onlyChild.length === 1 && typeof onlyChild[0] === 'string') { + text = onlyChild[0]; + } + } + if (text === undefined) { + children = []; + for (; childIndex < arguments.length; childIndex++) { + var child = arguments[childIndex]; + if (child === null || child === undefined) { + continue; + } else if (Array.isArray(child)) { + appendChildren(selector, child, children); + } else if (child.hasOwnProperty('vnodeSelector')) { + children.push(child); + } else { + children.push(toTextVNode(child)); + } + } + } + return { + vnodeSelector: selector, + properties: properties, + children: children, + text: text === '' ? undefined : text, + domNode: null + }; + }; + /** + * Contains simple low-level utility functions to manipulate the real DOM. + */ + exports.dom = { + /** + * Creates a real DOM tree from `vnode`. The [[Projection]] object returned will contain the resulting DOM Node in + * its [[Projection.domNode|domNode]] property. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] + * objects may only be rendered once. + * @param projectionOptions - Options to be used to create and update the projection. + * @returns The [[Projection]] which also contains the DOM Node that was created. + */ + create: function (vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + createDom(vnode, document.createElement('div'), undefined, projectionOptions); + return createProjection(vnode, projectionOptions); + }, + /** + * Appends a new childnode to the DOM which is generated from a [[VNode]]. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param parentNode - The parent node for the new childNode. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] + * objects may only be rendered once. + * @param projectionOptions - Options to be used to create and update the [[Projection]]. + * @returns The [[Projection]] that was created. + */ + append: function (parentNode, vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + createDom(vnode, parentNode, undefined, projectionOptions); + return createProjection(vnode, projectionOptions); + }, + /** + * Inserts a new DOM node which is generated from a [[VNode]]. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param beforeNode - The node that the DOM Node is inserted before. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. + * NOTE: [[VNode]] objects may only be rendered once. 
+ * @param projectionOptions - Options to be used to create and update the projection, see [[createProjector]]. + * @returns The [[Projection]] that was created. + */ + insertBefore: function (beforeNode, vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + createDom(vnode, beforeNode.parentNode, beforeNode, projectionOptions); + return createProjection(vnode, projectionOptions); + }, + /** + * Merges a new DOM node which is generated from a [[VNode]] with an existing DOM Node. + * This means that the virtual DOM and the real DOM will have one overlapping element. + * Therefore the selector for the root [[VNode]] will be ignored, but its properties and children will be applied to the Element provided. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param domNode - The existing element to adopt as the root of the new virtual DOM. Existing attributes and childnodes are preserved. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] objects + * may only be rendered once. + * @param projectionOptions - Options to be used to create and update the projection, see [[createProjector]]. + * @returns The [[Projection]] that was created. + */ + merge: function (element, vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + vnode.domNode = element; + initPropertiesAndChildren(element, vnode, projectionOptions); + return createProjection(vnode, projectionOptions); + } + }; + /** + * Creates a [[CalculationCache]] object, useful for caching [[VNode]] trees. + * In practice, caching of [[VNode]] trees is not needed, because achieving 60 frames per second is almost never a problem. + * For more information, see [[CalculationCache]]. + * + * @param The type of the value that is cached. + */ + exports.createCache = function () { + var cachedInputs = undefined; + var cachedOutcome = undefined; + var result = { + invalidate: function () { + cachedOutcome = undefined; + cachedInputs = undefined; + }, + result: function (inputs, calculation) { + if (cachedInputs) { + for (var i = 0; i < inputs.length; i++) { + if (cachedInputs[i] !== inputs[i]) { + cachedOutcome = undefined; + } + } + } + if (!cachedOutcome) { + cachedOutcome = calculation(); + cachedInputs = inputs; + } + return cachedOutcome; + } + }; + return result; + }; + /** + * Creates a {@link Mapping} instance that keeps an array of result objects synchronized with an array of source objects. + * See {@link http://maquettejs.org/docs/arrays.html|Working with arrays}. + * + * @param The type of source items. A database-record for instance. + * @param The type of target items. A [[Component]] for instance. + * @param getSourceKey `function(source)` that must return a key to identify each source object. The result must either be a string or a number. + * @param createResult `function(source, index)` that must create a new result object from a given source. This function is identical + * to the `callback` argument in `Array.map(callback)`. + * @param updateResult `function(source, target, index)` that updates a result to an updated source. 
+ */ + exports.createMapping = function (getSourceKey, createResult, updateResult) { + var keys = []; + var results = []; + return { + results: results, + map: function (newSources) { + var newKeys = newSources.map(getSourceKey); + var oldTargets = results.slice(); + var oldIndex = 0; + for (var i = 0; i < newSources.length; i++) { + var source = newSources[i]; + var sourceKey = newKeys[i]; + if (sourceKey === keys[oldIndex]) { + results[i] = oldTargets[oldIndex]; + updateResult(source, oldTargets[oldIndex], i); + oldIndex++; + } else { + var found = false; + for (var j = 1; j < keys.length; j++) { + var searchIndex = (oldIndex + j) % keys.length; + if (keys[searchIndex] === sourceKey) { + results[i] = oldTargets[searchIndex]; + updateResult(newSources[i], oldTargets[searchIndex], i); + oldIndex = searchIndex + 1; + found = true; + break; + } + } + if (!found) { + results[i] = createResult(source, i); + } + } + } + results.length = newSources.length; + keys = newKeys; + } + }; + }; + /** + * Creates a [[Projector]] instance using the provided projectionOptions. + * + * For more information, see [[Projector]]. + * + * @param projectionOptions Options that influence how the DOM is rendered and updated. + */ + exports.createProjector = function (projectorOptions) { + var projector; + var projectionOptions = applyDefaultProjectionOptions(projectorOptions); + projectionOptions.eventHandlerInterceptor = function (propertyName, eventHandler, domNode, properties) { + return function () { + // intercept function calls (event handlers) to do a render afterwards. + projector.scheduleRender(); + return eventHandler.apply(properties.bind || this, arguments); + }; + }; + var renderCompleted = true; + var scheduled; + var stopped = false; + var projections = []; + var renderFunctions = []; + // matches the projections array + var doRender = function () { + scheduled = undefined; + if (!renderCompleted) { + return; // The last render threw an error, it should be logged in the browser console. 
+ } + renderCompleted = false; + for (var i = 0; i < projections.length; i++) { + var updatedVnode = renderFunctions[i](); + projections[i].update(updatedVnode); + } + renderCompleted = true; + }; + projector = { + scheduleRender: function () { + if (!scheduled && !stopped) { + scheduled = requestAnimationFrame(doRender); + } + }, + stop: function () { + if (scheduled) { + cancelAnimationFrame(scheduled); + scheduled = undefined; + } + stopped = true; + }, + resume: function () { + stopped = false; + renderCompleted = true; + projector.scheduleRender(); + }, + append: function (parentNode, renderMaquetteFunction) { + projections.push(exports.dom.append(parentNode, renderMaquetteFunction(), projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + insertBefore: function (beforeNode, renderMaquetteFunction) { + projections.push(exports.dom.insertBefore(beforeNode, renderMaquetteFunction(), projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + merge: function (domNode, renderMaquetteFunction) { + projections.push(exports.dom.merge(domNode, renderMaquetteFunction(), projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + replace: function (domNode, renderMaquetteFunction) { + var vnode = renderMaquetteFunction(); + createDom(vnode, domNode.parentNode, domNode, projectionOptions); + domNode.parentNode.removeChild(domNode); + projections.push(createProjection(vnode, projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + detach: function (renderMaquetteFunction) { + for (var i = 0; i < renderFunctions.length; i++) { + if (renderFunctions[i] === renderMaquetteFunction) { + renderFunctions.splice(i, 1); + return projections.splice(i, 1)[0]; + } + } + throw new Error('renderMaquetteFunction was not found'); + } + }; + return projector; + }; +})); diff --git a/plugins/UiConfig/media/js/utils/Animation.coffee b/plugins/UiConfig/media/js/utils/Animation.coffee new file mode 100644 index 00000000..271b88c1 --- /dev/null +++ b/plugins/UiConfig/media/js/utils/Animation.coffee @@ -0,0 +1,138 @@ +class Animation + slideDown: (elem, props) -> + if elem.offsetTop > 2000 + return + + h = elem.offsetHeight + cstyle = window.getComputedStyle(elem) + margin_top = cstyle.marginTop + margin_bottom = cstyle.marginBottom + padding_top = cstyle.paddingTop + padding_bottom = cstyle.paddingBottom + transition = cstyle.transition + + elem.style.boxSizing = "border-box" + elem.style.overflow = "hidden" + elem.style.transform = "scale(0.6)" + elem.style.opacity = "0" + elem.style.height = "0px" + elem.style.marginTop = "0px" + elem.style.marginBottom = "0px" + elem.style.paddingTop = "0px" + elem.style.paddingBottom = "0px" + elem.style.transition = "none" + + setTimeout (-> + elem.className += " animate-inout" + elem.style.height = h+"px" + elem.style.transform = "scale(1)" + elem.style.opacity = "1" + elem.style.marginTop = margin_top + elem.style.marginBottom = margin_bottom + elem.style.paddingTop = padding_top + elem.style.paddingBottom = padding_bottom + ), 1 + + elem.addEventListener "transitionend", -> + elem.classList.remove("animate-inout") + elem.style.transition = elem.style.transform = elem.style.opacity = elem.style.height = null + elem.style.boxSizing = elem.style.marginTop = elem.style.marginBottom = null + elem.style.paddingTop = elem.style.paddingBottom = elem.style.overflow = null + elem.removeEventListener "transitionend", arguments.callee, false + + + slideUp: (elem, remove_func, props) -> + if elem.offsetTop > 1000 + 
return remove_func() + + elem.className += " animate-back" + elem.style.boxSizing = "border-box" + elem.style.height = elem.offsetHeight+"px" + elem.style.overflow = "hidden" + elem.style.transform = "scale(1)" + elem.style.opacity = "1" + elem.style.pointerEvents = "none" + setTimeout (-> + elem.style.height = "0px" + elem.style.marginTop = "0px" + elem.style.marginBottom = "0px" + elem.style.paddingTop = "0px" + elem.style.paddingBottom = "0px" + elem.style.transform = "scale(0.8)" + elem.style.borderTopWidth = "0px" + elem.style.borderBottomWidth = "0px" + elem.style.opacity = "0" + ), 1 + elem.addEventListener "transitionend", (e) -> + if e.propertyName == "opacity" or e.elapsedTime >= 0.6 + elem.removeEventListener "transitionend", arguments.callee, false + remove_func() + + + slideUpInout: (elem, remove_func, props) -> + elem.className += " animate-inout" + elem.style.boxSizing = "border-box" + elem.style.height = elem.offsetHeight+"px" + elem.style.overflow = "hidden" + elem.style.transform = "scale(1)" + elem.style.opacity = "1" + elem.style.pointerEvents = "none" + setTimeout (-> + elem.style.height = "0px" + elem.style.marginTop = "0px" + elem.style.marginBottom = "0px" + elem.style.paddingTop = "0px" + elem.style.paddingBottom = "0px" + elem.style.transform = "scale(0.8)" + elem.style.borderTopWidth = "0px" + elem.style.borderBottomWidth = "0px" + elem.style.opacity = "0" + ), 1 + elem.addEventListener "transitionend", (e) -> + if e.propertyName == "opacity" or e.elapsedTime >= 0.6 + elem.removeEventListener "transitionend", arguments.callee, false + remove_func() + + + showRight: (elem, props) -> + elem.className += " animate" + elem.style.opacity = 0 + elem.style.transform = "TranslateX(-20px) Scale(1.01)" + setTimeout (-> + elem.style.opacity = 1 + elem.style.transform = "TranslateX(0px) Scale(1)" + ), 1 + elem.addEventListener "transitionend", -> + elem.classList.remove("animate") + elem.style.transform = elem.style.opacity = null + + + show: (elem, props) -> + delay = arguments[arguments.length-2]?.delay*1000 or 1 + elem.style.opacity = 0 + setTimeout (-> + elem.className += " animate" + ), 1 + setTimeout (-> + elem.style.opacity = 1 + ), delay + elem.addEventListener "transitionend", -> + elem.classList.remove("animate") + elem.style.opacity = null + elem.removeEventListener "transitionend", arguments.callee, false + + hide: (elem, remove_func, props) -> + delay = arguments[arguments.length-2]?.delay*1000 or 1 + elem.className += " animate" + setTimeout (-> + elem.style.opacity = 0 + ), delay + elem.addEventListener "transitionend", (e) -> + if e.propertyName == "opacity" + remove_func() + + addVisibleClass: (elem, props) -> + setTimeout -> + elem.classList.add("visible") + +window.Animation = new Animation() \ No newline at end of file diff --git a/plugins/UiConfig/media/js/utils/Dollar.coffee b/plugins/UiConfig/media/js/utils/Dollar.coffee new file mode 100644 index 00000000..7f19f551 --- /dev/null +++ b/plugins/UiConfig/media/js/utils/Dollar.coffee @@ -0,0 +1,3 @@ +window.$ = (selector) -> + if selector.startsWith("#") + return document.getElementById(selector.replace("#", "")) diff --git a/plugins/UiConfig/media/js/utils/ZeroFrame.coffee b/plugins/UiConfig/media/js/utils/ZeroFrame.coffee new file mode 100644 index 00000000..11512d16 --- /dev/null +++ b/plugins/UiConfig/media/js/utils/ZeroFrame.coffee @@ -0,0 +1,85 @@ +class ZeroFrame extends Class + constructor: (url) -> + @url = url + @waiting_cb = {} + @wrapper_nonce = 
document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1") + @connect() + @next_message_id = 1 + @history_state = {} + @init() + + + init: -> + @ + + + connect: -> + @target = window.parent + window.addEventListener("message", @onMessage, false) + @cmd("innerReady") + + # Save scrollTop + window.addEventListener "beforeunload", (e) => + @log "save scrollTop", window.pageYOffset + @history_state["scrollTop"] = window.pageYOffset + @cmd "wrapperReplaceState", [@history_state, null] + + # Restore scrollTop + @cmd "wrapperGetState", [], (state) => + @history_state = state if state? + @log "restore scrollTop", state, window.pageYOffset + if window.pageYOffset == 0 and state + window.scroll(window.pageXOffset, state.scrollTop) + + + onMessage: (e) => + message = e.data + cmd = message.cmd + if cmd == "response" + if @waiting_cb[message.to]? + @waiting_cb[message.to](message.result) + else + @log "Websocket callback not found:", message + else if cmd == "wrapperReady" # Wrapper inited later + @cmd("innerReady") + else if cmd == "ping" + @response message.id, "pong" + else if cmd == "wrapperOpenedWebsocket" + @onOpenWebsocket() + else if cmd == "wrapperClosedWebsocket" + @onCloseWebsocket() + else + @onRequest cmd, message.params + + + onRequest: (cmd, message) => + @log "Unknown request", message + + + response: (to, result) -> + @send {"cmd": "response", "to": to, "result": result} + + + cmd: (cmd, params={}, cb=null) -> + @send {"cmd": cmd, "params": params}, cb + + + send: (message, cb=null) -> + message.wrapper_nonce = @wrapper_nonce + message.id = @next_message_id + @next_message_id += 1 + @target.postMessage(message, "*") + if cb + @waiting_cb[message.id] = cb + + + onOpenWebsocket: => + @log "Websocket open" + + + onCloseWebsocket: => + @log "Websocket close" + + + +window.ZeroFrame = ZeroFrame diff --git a/plugins/Zeroname/README.md b/plugins/Zeroname/README.md new file mode 100644 index 00000000..8a306789 --- /dev/null +++ b/plugins/Zeroname/README.md @@ -0,0 +1,54 @@ +# ZeroName + +Zeroname plugin to connect Namecoin and register all the .bit domain name. + +## Start + +You can create your own Zeroname. + +### Namecoin node + +You need to run a namecoin node. + +[Namecoin](https://namecoin.org/download/) + +You will need to start it as a RPC server. + +Example of `~/.namecoin/namecoin.conf` minimal setup: +``` +daemon=1 +rpcuser=your-name +rpcpassword=your-password +rpcport=8336 +server=1 +txindex=1 +``` + +Don't forget to change the `rpcuser` value and `rpcpassword` value! + +You can start your node : `./namecoind` + +### Create a Zeroname site + +You will also need to create a site `python zeronet.py createSite` and regitser the info. + +In the site you will need to create a file `./data//data/names.json` with this is it: +``` +{} +``` + +### `zeroname_config.json` file + +In `~/.namecoin/zeroname_config.json` +``` +{ + "lastprocessed": 223910, + "zeronet_path": "/root/ZeroNet", # Update with your path + "privatekey": "", # Update with your private key of your site + "site": "" # Update with the address of your site +} +``` + +### Run updater + +You can now run the script : `updater/zeroname_updater.py` and wait until it is fully sync (it might take a while). 
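+### What the updater writes
+
+Once blocks are processed, the updater fills the site's `data/names.json` with a flat `domain -> site address` map; registrations with subdomains get their own `sub.domain.bit` keys. A minimal sketch of what the file can look like after a sync (the addresses below are placeholders, not real registrations):
+```
+{
+  "example.bit": "1ExampleSiteAddressPlaceholder",
+  "blog.example.bit": "1AnotherSiteAddressPlaceholder"
+}
+```
+`SiteManagerPlugin.py` (below) loads this file from the Zeroname site to resolve `.bit` domains to site addresses.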
diff --git a/plugins/Zeroname/SiteManagerPlugin.py b/plugins/Zeroname/SiteManagerPlugin.py new file mode 100644 index 00000000..40088f12 --- /dev/null +++ b/plugins/Zeroname/SiteManagerPlugin.py @@ -0,0 +1,84 @@ +import logging +import re +import time + +from Config import config +from Plugin import PluginManager + +allow_reload = False # No reload supported + +log = logging.getLogger("ZeronamePlugin") + + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + site_zeroname = None + db_domains = None + db_domains_modified = None + + def load(self, *args, **kwargs): + super(SiteManagerPlugin, self).load(*args, **kwargs) + if not self.get(config.bit_resolver): + self.need(config.bit_resolver) # Need ZeroName site + + # Checks if it's a valid address + def isAddress(self, address): + return self.isBitDomain(address) or super(SiteManagerPlugin, self).isAddress(address) + + # Return: True if the address is domain + def isDomain(self, address): + return self.isBitDomain(address) or super(SiteManagerPlugin, self).isDomain(address) + + # Return: True if the address is .bit domain + def isBitDomain(self, address): + return re.match(r"(.*?)([A-Za-z0-9_-]+\.bit)$", address) + + # Resolve domain + # Return: The address or None + def resolveDomain(self, domain): + domain = domain.lower() + if not self.site_zeroname: + self.site_zeroname = self.need(config.bit_resolver) + + site_zeroname_modified = self.site_zeroname.content_manager.contents.get("content.json", {}).get("modified", 0) + if not self.db_domains or self.db_domains_modified != site_zeroname_modified: + self.site_zeroname.needFile("data/names.json", priority=10) + s = time.time() + self.db_domains = self.site_zeroname.storage.loadJson("data/names.json") + log.debug( + "Domain db with %s entries loaded in %.3fs (modification: %s -> %s)" % + (len(self.db_domains), time.time() - s, self.db_domains_modified, site_zeroname_modified) + ) + self.db_domains_modified = site_zeroname_modified + return self.db_domains.get(domain) + + # Return or create site and start download site files + # Return: Site or None if dns resolve failed + def need(self, address, *args, **kwargs): + if self.isBitDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: + address = address_resolved + else: + return None + + return super(SiteManagerPlugin, self).need(address, *args, **kwargs) + + # Return: Site object or None if not found + def get(self, address): + if not self.loaded: # Not loaded yet + self.load() + if self.isBitDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: # Domain found + site = self.sites.get(address_resolved) + if site: + site_domain = site.settings.get("domain") + if site_domain != address: + site.settings["domain"] = address + else: # Domain not found + site = self.sites.get(address) + + else: # Access by site address + site = super(SiteManagerPlugin, self).get(address) + return site diff --git a/plugins/Zeroname/UiRequestPlugin.py b/plugins/Zeroname/UiRequestPlugin.py new file mode 100644 index 00000000..b0230524 --- /dev/null +++ b/plugins/Zeroname/UiRequestPlugin.py @@ -0,0 +1,30 @@ +import re + +from Plugin import PluginManager + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + + def __init__(self, *args, **kwargs): + from Site import SiteManager + self.site_manager = SiteManager.site_manager + super(UiRequestPlugin, self).__init__(*args, **kwargs) + + # Media request + def 
actionSiteMedia(self, path, **kwargs): + match = re.match(r"/media/(?P<address>[A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", path)
+ if match: # Its a valid domain, resolve first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/" + address + match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs) # Get the wrapper frame output + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("Zeroname plugin") + group.add_argument('--bit_resolver', help='ZeroNet site to resolve .bit domains', default="1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F", metavar="address") + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/Zeroname/__init__.py b/plugins/Zeroname/__init__.py new file mode 100644 index 00000000..cf724069 --- /dev/null +++ b/plugins/Zeroname/__init__.py @@ -0,0 +1,2 @@ +from . import UiRequestPlugin +from . import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/Zeroname/updater/zeroname_updater.py b/plugins/Zeroname/updater/zeroname_updater.py new file mode 100644 index 00000000..1e00332c --- /dev/null +++ b/plugins/Zeroname/updater/zeroname_updater.py @@ -0,0 +1,249 @@ +from __future__ import print_function +import time +import json +import os +import sys +import re +import socket + +from six import string_types + +from subprocess import call +from bitcoinrpc.authproxy import AuthServiceProxy + + +def publish(): + print("* Signing and Publishing...") + call(" ".join(command_sign_publish), shell=True) + + +def processNameOp(domain, value, test=False): + if not value.strip().startswith("{"): + return False + try: + data = json.loads(value) + except Exception as err: + print("Json load error: %s" % err) + return False + if "zeronet" not in data and "map" not in data: + # Namecoin standard use {"map": { "blog": {"zeronet": "1D..."} }} + print("No zeronet and no map in ", data.keys()) + return False + if "map" in data: + # If subdomains using the Namecoin standard is present, just re-write in the Zeronet way + # and call the function again + data_map = data["map"] + new_value = {} + for subdomain in data_map: + if "zeronet" in data_map[subdomain]: + new_value[subdomain] = data_map[subdomain]["zeronet"] + if "zeronet" in data and isinstance(data["zeronet"], string_types): + # { + # "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9", + ....
+ # } + new_value[""] = data["zeronet"] + if len(new_value) > 0: + return processNameOp(domain, json.dumps({"zeronet": new_value}), test) + else: + return False + if "zeronet" in data and isinstance(data["zeronet"], string_types): + # { + # "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9" + # } is valid + return processNameOp(domain, json.dumps({"zeronet": { "": data["zeronet"]}}), test) + if not isinstance(data["zeronet"], dict): + print("Not dict: ", data["zeronet"]) + return False + if not re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", domain): + print("Invalid domain: ", domain) + return False + + if test: + return True + + if "slave" in sys.argv: + print("Waiting for master update arrive") + time.sleep(30) # Wait 30 sec to allow master updater + + # Note: Requires the file data/names.json to exist and contain "{}" to work + names_raw = open(names_path, "rb").read() + names = json.loads(names_raw) + for subdomain, address in data["zeronet"].items(): + subdomain = subdomain.lower() + address = re.sub("[^A-Za-z0-9]", "", address) + print(subdomain, domain, "->", address) + if subdomain: + if re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", subdomain): + names["%s.%s.bit" % (subdomain, domain)] = address + else: + print("Invalid subdomain:", domain, subdomain) + else: + names["%s.bit" % domain] = address + + new_names_raw = json.dumps(names, indent=2, sort_keys=True) + if new_names_raw != names_raw: + open(names_path, "wb").write(new_names_raw) + print("-", domain, "Changed") + return True + else: + print("-", domain, "Not changed") + return False + + +def processBlock(block_id, test=False): + print("Processing block #%s..." % block_id) + s = time.time() + block_hash = rpc.getblockhash(block_id) + block = rpc.getblock(block_hash) + + print("Checking %s tx" % len(block["tx"])) + updated = 0 + for tx in block["tx"]: + try: + transaction = rpc.getrawtransaction(tx, 1) + for vout in transaction.get("vout", []): + if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]: + name_op = vout["scriptPubKey"]["nameOp"] + updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"], test) + except Exception as err: + print("Error processing tx #%s %s" % (tx, err)) + print("Done in %.3fs (updated %s)." % (time.time() - s, updated)) + return updated + +# Connecting to RPC +def initRpc(config): + """Initialize Namecoin RPC""" + rpc_data = { + 'connect': '127.0.0.1', + 'port': '8336', + 'user': 'PLACEHOLDER', + 'password': 'PLACEHOLDER', + 'clienttimeout': '900' + } + try: + fptr = open(config, 'r') + lines = fptr.readlines() + fptr.close() + except: + return None # Or take some other appropriate action + + for line in lines: + if not line.startswith('rpc'): + continue + key_val = line.split(None, 1)[0] + (key, val) = key_val.split('=', 1) + if not key or not val: + continue + rpc_data[key[3:]] = val + + url = 'http://%(user)s:%(password)s@%(connect)s:%(port)s' % rpc_data + + return url, int(rpc_data['clienttimeout']) + +# Loading config... 
+ +# Check whether platform is on windows or linux +# On linux namecoin is installed under ~/.namecoin, while on on windows it is in %appdata%/Namecoin + +if sys.platform == "win32": + namecoin_location = os.getenv('APPDATA') + "/Namecoin/" +else: + namecoin_location = os.path.expanduser("~/.namecoin/") + +config_path = namecoin_location + 'zeroname_config.json' +if not os.path.isfile(config_path): # Create sample config + open(config_path, "w").write( + json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet', 'privatekey': '', 'lastprocessed': 223910}, indent=2) + ) + print("* Example config written to %s" % config_path) + sys.exit(0) + +config = json.load(open(config_path)) +names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"]) +os.chdir(config["zeronet_path"]) # Change working dir - tells script where Zeronet install is. + +# Parameters to sign and publish +command_sign_publish = [sys.executable, "zeronet.py", "siteSign", config["site"], config["privatekey"], "--publish"] +if sys.platform == 'win32': + command_sign_publish = ['"%s"' % param for param in command_sign_publish] + +# Initialize rpc connection +rpc_auth, rpc_timeout = initRpc(namecoin_location + "namecoin.conf") +rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) + +node_version = rpc.getnetworkinfo()['version'] + +while 1: + try: + time.sleep(1) + if node_version < 160000 : + last_block = int(rpc.getinfo()["blocks"]) + else: + last_block = int(rpc.getblockchaininfo()["blocks"]) + break # Connection succeeded + except socket.timeout: # Timeout + print(".", end=' ') + sys.stdout.flush() + except Exception as err: + print("Exception", err.__class__, err) + time.sleep(5) + rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) + +if not config["lastprocessed"]: # First startup: Start processing from last block + config["lastprocessed"] = last_block + + +print("- Testing domain parsing...") +assert processBlock(223911, test=True) # Testing zeronetwork.bit +assert processBlock(227052, test=True) # Testing brainwallets.bit +assert not processBlock(236824, test=True) # Utf8 domain name (invalid should skip) +assert not processBlock(236752, test=True) # Uppercase domain (invalid should skip) +assert processBlock(236870, test=True) # Encoded domain (should pass) +assert processBlock(438317, test=True) # Testing namecoin standard artifaxradio.bit (should pass) +# sys.exit(0) + +print("- Parsing skipped blocks...") +should_publish = False +for block_id in range(config["lastprocessed"], last_block + 1): + if processBlock(block_id): + should_publish = True +config["lastprocessed"] = last_block + +if should_publish: + publish() + +while 1: + print("- Waiting for new block") + sys.stdout.flush() + while 1: + try: + time.sleep(1) + if node_version < 160000 : + rpc.waitforblock() + else: + rpc.waitfornewblock() + print("Found") + break # Block found + except socket.timeout: # Timeout + print(".", end=' ') + sys.stdout.flush() + except Exception as err: + print("Exception", err.__class__, err) + time.sleep(5) + rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) + + if node_version < 160000 : + last_block = int(rpc.getinfo()["blocks"]) + else: + last_block = int(rpc.getblockchaininfo()["blocks"]) + should_publish = False + for block_id in range(config["lastprocessed"] + 1, last_block + 1): + if processBlock(block_id): + should_publish = True + + config["lastprocessed"] = last_block + open(config_path, "w").write(json.dumps(config, indent=2)) + + if should_publish: + publish() diff --git 
a/plugins/__init__.py b/plugins/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/plugins/disabled-Bootstrapper/BootstrapperDb.py b/plugins/disabled-Bootstrapper/BootstrapperDb.py new file mode 100644 index 00000000..fcc428f7 --- /dev/null +++ b/plugins/disabled-Bootstrapper/BootstrapperDb.py @@ -0,0 +1,156 @@ +import time +import re + +import gevent + +from Config import config +from Db import Db +from util import helper + + +class BootstrapperDb(Db.Db): + def __init__(self): + self.version = 7 + self.hash_ids = {} # hash -> id cache + super(BootstrapperDb, self).__init__({"db_name": "Bootstrapper"}, "%s/bootstrapper.db" % config.data_dir) + self.foreign_keys = True + self.checkTables() + self.updateHashCache() + gevent.spawn(self.cleanup) + + def cleanup(self): + while 1: + time.sleep(4 * 60) + timeout = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time() - 60 * 40)) + self.execute("DELETE FROM peer WHERE date_announced < ?", [timeout]) + + def updateHashCache(self): + res = self.execute("SELECT * FROM hash") + self.hash_ids = {str(row["hash"]): row["hash_id"] for row in res} + self.log.debug("Loaded %s hash_ids" % len(self.hash_ids)) + + def checkTables(self): + version = int(self.execute("PRAGMA user_version").fetchone()[0]) + self.log.debug("Db version: %s, needed: %s" % (version, self.version)) + if version < self.version: + self.createTables() + else: + self.execute("VACUUM") + + def createTables(self): + # Delete all tables + self.execute("PRAGMA writable_schema = 1") + self.execute("DELETE FROM sqlite_master WHERE type IN ('table', 'index', 'trigger')") + self.execute("PRAGMA writable_schema = 0") + self.execute("VACUUM") + self.execute("PRAGMA INTEGRITY_CHECK") + # Create new tables + self.execute(""" + CREATE TABLE peer ( + peer_id INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE, + type TEXT, + address TEXT, + port INTEGER NOT NULL, + date_added DATETIME DEFAULT (CURRENT_TIMESTAMP), + date_announced DATETIME DEFAULT (CURRENT_TIMESTAMP) + ); + """) + self.execute("CREATE UNIQUE INDEX peer_key ON peer (address, port);") + + self.execute(""" + CREATE TABLE peer_to_hash ( + peer_to_hash_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE NOT NULL, + peer_id INTEGER REFERENCES peer (peer_id) ON DELETE CASCADE, + hash_id INTEGER REFERENCES hash (hash_id) + ); + """) + self.execute("CREATE INDEX peer_id ON peer_to_hash (peer_id);") + self.execute("CREATE INDEX hash_id ON peer_to_hash (hash_id);") + + self.execute(""" + CREATE TABLE hash ( + hash_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE NOT NULL, + hash BLOB UNIQUE NOT NULL, + date_added DATETIME DEFAULT (CURRENT_TIMESTAMP) + ); + """) + self.execute("PRAGMA user_version = %s" % self.version) + + def getHashId(self, hash): + if hash not in self.hash_ids: + self.log.debug("New hash: %s" % repr(hash)) + self.execute("INSERT OR IGNORE INTO hash ?", {"hash": hash}) + self.hash_ids[hash] = self.cur.cursor.lastrowid + return self.hash_ids[hash] + + def peerAnnounce(self, ip_type, address, port=None, hashes=[], onion_signed=False, delete_missing_hashes=False): + hashes_ids_announced = [] + for hash in hashes: + hashes_ids_announced.append(self.getHashId(hash)) + + # Check user + res = self.execute("SELECT peer_id FROM peer WHERE ? LIMIT 1", {"address": address, "port": port}) + + user_row = res.fetchone() + now = time.strftime("%Y-%m-%d %H:%M:%S") + if user_row: + peer_id = user_row["peer_id"] + self.execute("UPDATE peer SET date_announced = ? 
WHERE peer_id = ?", (now, peer_id)) + else: + self.log.debug("New peer: %s signed: %s" % (address, onion_signed)) + if ip_type == "onion" and not onion_signed: + return len(hashes) + self.execute("INSERT INTO peer ?", {"type": ip_type, "address": address, "port": port, "date_announced": now}) + peer_id = self.cur.cursor.lastrowid + + # Check user's hashes + res = self.execute("SELECT * FROM peer_to_hash WHERE ?", {"peer_id": peer_id}) + hash_ids_db = [row["hash_id"] for row in res] + if hash_ids_db != hashes_ids_announced: + hash_ids_added = set(hashes_ids_announced) - set(hash_ids_db) + hash_ids_removed = set(hash_ids_db) - set(hashes_ids_announced) + if ip_type != "onion" or onion_signed: + for hash_id in hash_ids_added: + self.execute("INSERT INTO peer_to_hash ?", {"peer_id": peer_id, "hash_id": hash_id}) + if hash_ids_removed and delete_missing_hashes: + self.execute("DELETE FROM peer_to_hash WHERE ?", {"peer_id": peer_id, "hash_id": list(hash_ids_removed)}) + + return len(hash_ids_added) + len(hash_ids_removed) + else: + return 0 + + def peerList(self, hash, address=None, onions=[], port=None, limit=30, need_types=["ipv4", "onion"], order=True): + back = {"ipv4": [], "ipv6": [], "onion": []} + if limit == 0: + return back + hashid = self.getHashId(hash) + + if order: + order_sql = "ORDER BY date_announced DESC" + else: + order_sql = "" + where_sql = "hash_id = :hashid" + if onions: + onions_escaped = ["'%s'" % re.sub("[^a-z0-9,]", "", onion) for onion in onions if type(onion) is str] + where_sql += " AND address NOT IN (%s)" % ",".join(onions_escaped) + elif address: + where_sql += " AND NOT (address = :address AND port = :port)" + + query = """ + SELECT type, address, port + FROM peer_to_hash + LEFT JOIN peer USING (peer_id) + WHERE %s + %s + LIMIT :limit + """ % (where_sql, order_sql) + res = self.execute(query, {"hashid": hashid, "address": address, "port": port, "limit": limit}) + + for row in res: + if row["type"] in need_types: + if row["type"] == "onion": + packed = helper.packOnionAddress(row["address"], row["port"]) + else: + packed = helper.packAddress(str(row["address"]), row["port"]) + back[row["type"]].append(packed) + return back diff --git a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py new file mode 100644 index 00000000..b6d9e178 --- /dev/null +++ b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py @@ -0,0 +1,155 @@ +import time + +from util import helper + +from Plugin import PluginManager +from .BootstrapperDb import BootstrapperDb +from Crypt import CryptRsa +from Config import config + +if "db" not in locals().keys(): # Share during reloads + db = BootstrapperDb() + + +@PluginManager.registerTo("FileRequest") +class FileRequestPlugin(object): + def checkOnionSigns(self, onions, onion_signs, onion_sign_this): + if not onion_signs or len(onion_signs) != len(set(onions)): + return False + + if time.time() - float(onion_sign_this) > 3 * 60: + return False # Signed out of allowed 3 minutes + + onions_signed = [] + # Check onion signs + for onion_publickey, onion_sign in onion_signs.items(): + if CryptRsa.verify(onion_sign_this.encode(), onion_publickey, onion_sign): + onions_signed.append(CryptRsa.publickeyToOnion(onion_publickey)) + else: + break + + # Check if the same onion addresses signed as the announced onces + if sorted(onions_signed) == sorted(set(onions)): + return True + else: + return False + + def actionAnnounce(self, params): + time_started = time.time() + s = time.time() + # Backward 
compatibility + if "ip4" in params["add"]: + params["add"].append("ipv4") + if "ip4" in params["need_types"]: + params["need_types"].append("ipv4") + + hashes = params["hashes"] + + all_onions_signed = self.checkOnionSigns(params.get("onions", []), params.get("onion_signs"), params.get("onion_sign_this")) + + time_onion_check = time.time() - s + + ip_type = helper.getIpType(self.connection.ip) + + if ip_type == "onion" or self.connection.ip in config.ip_local: + is_port_open = False + elif ip_type in params["add"]: + is_port_open = True + else: + is_port_open = False + + s = time.time() + # Separatley add onions to sites or at once if no onions present + i = 0 + onion_to_hash = {} + for onion in params.get("onions", []): + if onion not in onion_to_hash: + onion_to_hash[onion] = [] + onion_to_hash[onion].append(hashes[i]) + i += 1 + + hashes_changed = 0 + for onion, onion_hashes in onion_to_hash.items(): + hashes_changed += db.peerAnnounce( + ip_type="onion", + address=onion, + port=params["port"], + hashes=onion_hashes, + onion_signed=all_onions_signed + ) + time_db_onion = time.time() - s + + s = time.time() + + if is_port_open: + hashes_changed += db.peerAnnounce( + ip_type=ip_type, + address=self.connection.ip, + port=params["port"], + hashes=hashes, + delete_missing_hashes=params.get("delete") + ) + time_db_ip = time.time() - s + + s = time.time() + # Query sites + back = {} + peers = [] + if params.get("onions") and not all_onions_signed and hashes_changed: + back["onion_sign_this"] = "%.0f" % time.time() # Send back nonce for signing + + if len(hashes) > 500 or not hashes_changed: + limit = 5 + order = False + else: + limit = 30 + order = True + for hash in hashes: + if time.time() - time_started > 1: # 1 sec limit on request + self.connection.log("Announce time limit exceeded after %s/%s sites" % (len(peers), len(hashes))) + break + + hash_peers = db.peerList( + hash, + address=self.connection.ip, onions=list(onion_to_hash.keys()), port=params["port"], + limit=min(limit, params["need_num"]), need_types=params["need_types"], order=order + ) + if "ip4" in params["need_types"]: # Backward compatibility + hash_peers["ip4"] = hash_peers["ipv4"] + del(hash_peers["ipv4"]) + peers.append(hash_peers) + time_peerlist = time.time() - s + + back["peers"] = peers + self.connection.log( + "Announce %s sites (onions: %s, onion_check: %.3fs, db_onion: %.3fs, db_ip: %.3fs, peerlist: %.3fs, limit: %s)" % + (len(hashes), len(onion_to_hash), time_onion_check, time_db_onion, time_db_ip, time_peerlist, limit) + ) + self.response(back) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def actionStatsBootstrapper(self): + self.sendHeader() + + # Style + yield """ + + """ + + hash_rows = db.execute("SELECT * FROM hash").fetchall() + for hash_row in hash_rows: + peer_rows = db.execute( + "SELECT * FROM peer LEFT JOIN peer_to_hash USING (peer_id) WHERE hash_id = :hash_id", + {"hash_id": hash_row["hash_id"]} + ).fetchall() + + yield "
<br>%s (added: %s, peers: %s)<br>" % ( + str(hash_row["hash"]).encode("hex"), hash_row["date_added"], len(peer_rows) + ) + for peer_row in peer_rows: + yield " - {ip4: <30} {onion: <30} added: {date_added}, announced: {date_announced}<br>
    ".format(**dict(peer_row)) diff --git a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py new file mode 100644 index 00000000..983cb44c --- /dev/null +++ b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py @@ -0,0 +1,224 @@ +import hashlib +import os + +import pytest + +from Bootstrapper import BootstrapperPlugin +from Bootstrapper.BootstrapperDb import BootstrapperDb +from Peer import Peer +from Crypt import CryptRsa +from util import helper + + +@pytest.fixture() +def bootstrapper_db(request): + BootstrapperPlugin.db.close() + BootstrapperPlugin.db = BootstrapperDb() + BootstrapperPlugin.db.createTables() # Reset db + BootstrapperPlugin.db.cur.logging = True + + def cleanup(): + BootstrapperPlugin.db.close() + os.unlink(BootstrapperPlugin.db.db_path) + + request.addfinalizer(cleanup) + return BootstrapperPlugin.db + + +@pytest.mark.usefixtures("resetSettings") +class TestBootstrapper: + def testBootstrapperDb(self, file_server, bootstrapper_db): + ip_type = helper.getIpType(file_server.ip) + peer = Peer(file_server.ip, 1544, connection_server=file_server) + hash1 = hashlib.sha256(b"site1").digest() + hash2 = hashlib.sha256(b"site2").digest() + hash3 = hashlib.sha256(b"site3").digest() + + # Verify empty result + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] + }) + + assert len(res["peers"][0][ip_type]) == 0 # Empty result + + # Verify added peer on previous request + bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1, hash2], delete_missing_hashes=True) + + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] + }) + assert len(res["peers"][0][ip_type]) == 1 + assert len(res["peers"][1][ip_type]) == 1 + + # hash2 deleted from 1.2.3.4 + bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1], delete_missing_hashes=True) + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] + }) + assert len(res["peers"][0][ip_type]) == 1 + assert len(res["peers"][1][ip_type]) == 0 + + # Announce 3 hash again + bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1, hash2, hash3], delete_missing_hashes=True) + res = peer.request("announce", { + "hashes": [hash1, hash2, hash3], + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] + }) + assert len(res["peers"][0][ip_type]) == 1 + assert len(res["peers"][1][ip_type]) == 1 + assert len(res["peers"][2][ip_type]) == 1 + + # Single hash announce + res = peer.request("announce", { + "hashes": [hash1], "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] + }) + assert len(res["peers"][0][ip_type]) == 1 + + # Test DB cleanup + assert [row[0] for row in bootstrapper_db.execute("SELECT address FROM peer").fetchall()] == [file_server.ip_external] # 127.0.0.1 never get added to db + + # Delete peers + bootstrapper_db.execute("DELETE FROM peer WHERE address = ?", [file_server.ip_external]) + assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer_to_hash").fetchone()["num"] == 0 + + assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM hash").fetchone()["num"] == 3 # 3 sites + assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 0 # 0 peer + + def 
testPassive(self, file_server, bootstrapper_db): + peer = Peer(file_server.ip, 1544, connection_server=file_server) + ip_type = helper.getIpType(file_server.ip) + hash1 = hashlib.sha256(b"hash1").digest() + + bootstrapper_db.peerAnnounce(ip_type, address=None, port=15441, hashes=[hash1]) + res = peer.request("announce", { + "hashes": [hash1], "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [] + }) + + assert len(res["peers"][0]["ipv4"]) == 0 # Empty result + + def testAddOnion(self, file_server, site, bootstrapper_db, tor_manager): + onion1 = tor_manager.addOnion() + onion2 = tor_manager.addOnion() + peer = Peer(file_server.ip, 1544, connection_server=file_server) + hash1 = hashlib.sha256(b"site1").digest() + hash2 = hashlib.sha256(b"site2").digest() + hash3 = hashlib.sha256(b"site3").digest() + + bootstrapper_db.peerAnnounce(ip_type="ipv4", address="1.2.3.4", port=1234, hashes=[hash1, hash2, hash3]) + res = peer.request("announce", { + "onions": [onion1, onion1, onion2], + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert len(res["peers"][0]["ipv4"]) == 1 + + # Onion address not added yet + site_peers = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers["onion"]) == 0 + assert "onion_sign_this" in res + + # Sign the nonces + sign1 = CryptRsa.sign(res["onion_sign_this"].encode(), tor_manager.getPrivatekey(onion1)) + sign2 = CryptRsa.sign(res["onion_sign_this"].encode(), tor_manager.getPrivatekey(onion2)) + + # Bad sign (different address) + res = peer.request("announce", { + "onions": [onion1], "onion_sign_this": res["onion_sign_this"], + "onion_signs": {tor_manager.getPublickey(onion2): sign2}, + "hashes": [hash1], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert "onion_sign_this" in res + site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers1["onion"]) == 0 # Not added + + # Bad sign (missing one) + res = peer.request("announce", { + "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], + "onion_signs": {tor_manager.getPublickey(onion1): sign1}, + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert "onion_sign_this" in res + site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers1["onion"]) == 0 # Not added + + # Good sign + res = peer.request("announce", { + "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], + "onion_signs": {tor_manager.getPublickey(onion1): sign1, tor_manager.getPublickey(onion2): sign2}, + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert "onion_sign_this" not in res + + # Onion addresses added + site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers1["onion"]) == 1 + site_peers2 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash2) + assert len(site_peers2["onion"]) == 1 + site_peers3 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash3) + assert len(site_peers3["onion"]) == 1 + + assert site_peers1["onion"][0] == site_peers2["onion"][0] + assert site_peers2["onion"][0] != site_peers3["onion"][0] + assert helper.unpackOnionAddress(site_peers1["onion"][0])[0] == onion1 + ".onion" + assert 
helper.unpackOnionAddress(site_peers2["onion"][0])[0] == onion1 + ".onion" + assert helper.unpackOnionAddress(site_peers3["onion"][0])[0] == onion2 + ".onion" + + tor_manager.delOnion(onion1) + tor_manager.delOnion(onion2) + + def testRequestPeers(self, file_server, site, bootstrapper_db, tor_manager): + site.connection_server = file_server + file_server.tor_manager = tor_manager + hash = hashlib.sha256(site.address.encode()).digest() + + # Request peers from tracker + assert len(site.peers) == 0 + bootstrapper_db.peerAnnounce(ip_type="ipv4", address="1.2.3.4", port=1234, hashes=[hash]) + site.announcer.announceTracker("zero://%s:%s" % (file_server.ip, file_server.port)) + assert len(site.peers) == 1 + + # Test onion address store + bootstrapper_db.peerAnnounce(ip_type="onion", address="bka4ht2bzxchy44r", port=1234, hashes=[hash], onion_signed=True) + site.announcer.announceTracker("zero://%s:%s" % (file_server.ip, file_server.port)) + assert len(site.peers) == 2 + assert "bka4ht2bzxchy44r.onion:1234" in site.peers + + @pytest.mark.slow + def testAnnounce(self, file_server, tor_manager): + file_server.tor_manager = tor_manager + hash1 = hashlib.sha256(b"1Nekos4fiBqfcazyG1bAxdBT5oBvA76Z").digest() + hash2 = hashlib.sha256(b"1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr").digest() + peer = Peer("zero.booth.moe", 443, connection_server=file_server) + assert peer.request("ping") + peer = Peer("boot3rdez4rzn36x.onion", 15441, connection_server=file_server) + assert peer.request("ping") + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": ["ip4", "onion"], "need_num": 100, "add": [""] + }) + + assert res + + def testBackwardCompatibility(self, file_server, bootstrapper_db): + peer = Peer(file_server.ip, 1544, connection_server=file_server) + hash1 = hashlib.sha256(b"site1").digest() + + bootstrapper_db.peerAnnounce("ipv4", file_server.ip_external, port=15441, hashes=[hash1], delete_missing_hashes=True) + + # Test with ipv4 need type + res = peer.request("announce", { + "hashes": [hash1], + "port": 15441, "need_types": ["ipv4"], "need_num": 10, "add": [] + }) + + assert len(res["peers"][0]["ipv4"]) == 1 + + # Test with ip4 need type + res = peer.request("announce", { + "hashes": [hash1], + "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": [] + }) + + assert len(res["peers"][0]["ip4"]) == 1 diff --git a/plugins/disabled-Bootstrapper/Test/conftest.py b/plugins/disabled-Bootstrapper/Test/conftest.py new file mode 100644 index 00000000..8c1df5b2 --- /dev/null +++ b/plugins/disabled-Bootstrapper/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/disabled-Bootstrapper/Test/pytest.ini b/plugins/disabled-Bootstrapper/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/disabled-Bootstrapper/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/disabled-Bootstrapper/__init__.py b/plugins/disabled-Bootstrapper/__init__.py new file mode 100644 index 00000000..cce30eea --- /dev/null +++ b/plugins/disabled-Bootstrapper/__init__.py @@ -0,0 +1 @@ +from . 
import BootstrapperPlugin \ No newline at end of file diff --git a/plugins/disabled-Dnschain/SiteManagerPlugin.py b/plugins/disabled-Dnschain/SiteManagerPlugin.py new file mode 100644 index 00000000..8b9508f1 --- /dev/null +++ b/plugins/disabled-Dnschain/SiteManagerPlugin.py @@ -0,0 +1,153 @@ +import logging, json, os, re, sys, time +import gevent +from Plugin import PluginManager +from Config import config +from util import Http +from Debug import Debug + +allow_reload = False # No reload supported + +log = logging.getLogger("DnschainPlugin") + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + dns_cache_path = "%s/dns_cache.json" % config.data_dir + dns_cache = None + + # Checks if its a valid address + def isAddress(self, address): + if self.isDomain(address): + return True + else: + return super(SiteManagerPlugin, self).isAddress(address) + + + # Return: True if the address is domain + def isDomain(self, address): + return re.match(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + + + # Load dns entries from data/dns_cache.json + def loadDnsCache(self): + if os.path.isfile(self.dns_cache_path): + self.dns_cache = json.load(open(self.dns_cache_path)) + else: + self.dns_cache = {} + log.debug("Loaded dns cache, entries: %s" % len(self.dns_cache)) + + + # Save dns entries to data/dns_cache.json + def saveDnsCache(self): + json.dump(self.dns_cache, open(self.dns_cache_path, "wb"), indent=2) + + + # Resolve domain using dnschain.net + # Return: The address or None + def resolveDomainDnschainNet(self, domain): + try: + match = self.isDomain(domain) + sub_domain = match.group(1).strip(".") + top_domain = match.group(2) + if not sub_domain: sub_domain = "@" + address = None + with gevent.Timeout(5, Exception("Timeout: 5s")): + res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read() + data = json.loads(res)["data"]["value"] + if "zeronet" in data: + for key, val in data["zeronet"].items(): + self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours + self.saveDnsCache() + return data["zeronet"].get(sub_domain) + # Not found + return address + except Exception as err: + log.debug("Dnschain.net %s resolve error: %s" % (domain, Debug.formatException(err))) + + + # Resolve domain using dnschain.info + # Return: The address or None + def resolveDomainDnschainInfo(self, domain): + try: + match = self.isDomain(domain) + sub_domain = match.group(1).strip(".") + top_domain = match.group(2) + if not sub_domain: sub_domain = "@" + address = None + with gevent.Timeout(5, Exception("Timeout: 5s")): + res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read() + data = json.loads(res)["value"] + for key, val in data["zeronet"].items(): + self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours + self.saveDnsCache() + return data["zeronet"].get(sub_domain) + # Not found + return address + except Exception as err: + log.debug("Dnschain.info %s resolve error: %s" % (domain, Debug.formatException(err))) + + + # Resolve domain + # Return: The address or None + def resolveDomain(self, domain): + domain = domain.lower() + if self.dns_cache == None: + self.loadDnsCache() + if domain.count(".") < 2: # Its a topleved request, prepend @. 
to it + domain = "@."+domain + + domain_details = self.dns_cache.get(domain) + if domain_details and time.time() < domain_details[1]: # Found in cache and its not expired + return domain_details[0] + else: + # Resovle dns using dnschain + thread_dnschain_info = gevent.spawn(self.resolveDomainDnschainInfo, domain) + thread_dnschain_net = gevent.spawn(self.resolveDomainDnschainNet, domain) + gevent.joinall([thread_dnschain_net, thread_dnschain_info]) # Wait for finish + + if thread_dnschain_info.value and thread_dnschain_net.value: # Booth successfull + if thread_dnschain_info.value == thread_dnschain_net.value: # Same returned value + return thread_dnschain_info.value + else: + log.error("Dns %s missmatch: %s != %s" % (domain, thread_dnschain_info.value, thread_dnschain_net.value)) + + # Problem during resolve + if domain_details: # Resolve failed, but we have it in the cache + domain_details[1] = time.time()+60*60 # Dont try again for 1 hour + return domain_details[0] + else: # Not found in cache + self.dns_cache[domain] = [None, time.time()+60] # Don't check again for 1 min + return None + + + # Return or create site and start download site files + # Return: Site or None if dns resolve failed + def need(self, address, all_file=True): + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: + address = address_resolved + else: + return None + + return super(SiteManagerPlugin, self).need(address, all_file) + + + # Return: Site object or None if not found + def get(self, address): + if self.sites == None: # Not loaded yet + self.load() + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: # Domain found + site = self.sites.get(address_resolved) + if site: + site_domain = site.settings.get("domain") + if site_domain != address: + site.settings["domain"] = address + else: # Domain not found + site = self.sites.get(address) + + else: # Access by site address + site = self.sites.get(address) + return site + diff --git a/plugins/disabled-Dnschain/UiRequestPlugin.py b/plugins/disabled-Dnschain/UiRequestPlugin.py new file mode 100644 index 00000000..8ab9d5c5 --- /dev/null +++ b/plugins/disabled-Dnschain/UiRequestPlugin.py @@ -0,0 +1,34 @@ +import re +from Plugin import PluginManager + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def __init__(self, server = None): + from Site import SiteManager + self.site_manager = SiteManager.site_manager + super(UiRequestPlugin, self).__init__(server) + + + # Media request + def actionSiteMedia(self, path): + match = re.match(r"/media/(?P
<address>[A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", path) + if match: # It's a valid domain, resolve it first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/"+address+match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output + + + # Is media request allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + referer_site_address = re.match(r"/(?P<address>
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") + + if referer_site_address == site_address: # Referer site address as simple address + return True + elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns + return True + else: # Invalid referer + return False + diff --git a/plugins/disabled-Dnschain/__init__.py b/plugins/disabled-Dnschain/__init__.py new file mode 100644 index 00000000..2b36af5d --- /dev/null +++ b/plugins/disabled-Dnschain/__init__.py @@ -0,0 +1,3 @@ +# This plugin is experimental, if you really want to enable uncomment the following lines: +# import DnschainPlugin +# import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/disabled-DonationMessage/DonationMessagePlugin.py b/plugins/disabled-DonationMessage/DonationMessagePlugin.py new file mode 100644 index 00000000..8cf0d541 --- /dev/null +++ b/plugins/disabled-DonationMessage/DonationMessagePlugin.py @@ -0,0 +1,22 @@ +import re +from Plugin import PluginManager + +# Warning: If you modify the donation address then renmae the plugin's directory to "MyDonationMessage" to prevent the update script overwrite + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Inject a donation message to every page top right corner + def renderWrapper(self, *args, **kwargs): + body = super(UiRequestPlugin, self).renderWrapper(*args, **kwargs) # Get the wrapper frame output + + inject_html = """ + + Please donate to help to keep this ZeroProxy alive + + + """ + + return re.sub(r"\s*\s*$", inject_html, body) diff --git a/plugins/disabled-DonationMessage/__init__.py b/plugins/disabled-DonationMessage/__init__.py new file mode 100644 index 00000000..1d4b47c3 --- /dev/null +++ b/plugins/disabled-DonationMessage/__init__.py @@ -0,0 +1 @@ +from . import DonationMessagePlugin diff --git a/plugins/disabled-Multiuser/MultiuserPlugin.py b/plugins/disabled-Multiuser/MultiuserPlugin.py new file mode 100644 index 00000000..2406e224 --- /dev/null +++ b/plugins/disabled-Multiuser/MultiuserPlugin.py @@ -0,0 +1,233 @@ +import re +import sys +import json + +from Config import config +from Plugin import PluginManager +from Crypt import CryptBitcoin +from . import UserPlugin + +# We can only import plugin host clases after the plugins are loaded +@PluginManager.afterLoad +def importPluginnedClasses(): + global UserManager + from User import UserManager + +try: + local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys()) # Users in users.json +except Exception as err: + local_master_addresses = set() + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def __init__(self, *args, **kwargs): + self.user_manager = UserManager.user_manager + super(UiRequestPlugin, self).__init__(*args, **kwargs) + + # Create new user and inject user welcome message if necessary + # Return: Html body also containing the injection + def actionWrapper(self, path, extra_headers=None): + + match = re.match("/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) + if not match: + return False + + inner_path = match.group("inner_path").lstrip("/") + html_request = "." not in inner_path or inner_path.endswith(".html") # Only inject html to html requests + + user_created = False + if html_request: + user = self.getCurrentUser() # Get user from cookie + if not user: # No user found by cookie + user = self.user_manager.create() + user_created = True + else: + user = None + + # Disable new site creation if --multiuser_no_new_sites enabled + if config.multiuser_no_new_sites: + path_parts = self.parsePath(path) + if not self.server.site_manager.get(match.group("address")) and (not user or user.master_address not in local_master_addresses): + self.sendHeader(404) + return self.formatError("Not Found", "Adding new sites disabled on this proxy", details=False) + + if user_created: + if not extra_headers: + extra_headers = {} + extra_headers['Set-Cookie'] = "master_address=%s;path=/;max-age=2592000;" % user.master_address # = 30 days + + loggedin = self.get.get("login") == "done" + + back_generator = super(UiRequestPlugin, self).actionWrapper(path, extra_headers) # Get the wrapper frame output + + if not back_generator: # Wrapper error or not string returned, injection not possible + return False + + elif loggedin: + back = next(back_generator) + inject_html = """ + + + + + """.replace("\t", "") + if user.master_address in local_master_addresses: + message = "Hello master!" + else: + message = "Hello again!" + inject_html = inject_html.replace("{message}", message) + inject_html = inject_html.replace("{script_nonce}", self.getScriptNonce()) + return iter([re.sub(b"\s*\s*$", inject_html.encode(), back)]) # Replace the tags with the injection + + else: # No injection necessary + return back_generator + + # Get the current user based on request's cookies + # Return: User object or None if no match + def getCurrentUser(self): + cookies = self.getCookies() + user = None + if "master_address" in cookies: + users = self.user_manager.list() + user = users.get(cookies["master_address"]) + return user + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def __init__(self, *args, **kwargs): + self.multiuser_denied_cmds = ( + "siteDelete", "configSet", "serverShutdown", "serverUpdate", "siteClone", + "siteSetOwned", "siteSetAutodownloadoptional", "dbReload", "dbRebuild", + "mergerSiteDelete", "siteSetLimit", "siteSetAutodownloadBigfileLimit", + "optionalLimitSet", "optionalHelp", "optionalHelpRemove", "optionalHelpAll", "optionalFilePin", "optionalFileUnpin", "optionalFileDelete", + "muteAdd", "muteRemove", "siteblockAdd", "siteblockRemove", "filterIncludeAdd", "filterIncludeRemove" + ) + if config.multiuser_no_new_sites: + self.multiuser_denied_cmds += ("mergerSiteAdd", ) + + super(UiWebsocketPlugin, self).__init__(*args, **kwargs) + + # Let the page know we running in multiuser mode + def formatServerInfo(self): + server_info = super(UiWebsocketPlugin, self).formatServerInfo() + server_info["multiuser"] = True + if "ADMIN" in self.site.settings["permissions"]: + server_info["master_address"] = self.user.master_address + return server_info + + # Show current user's master seed + def actionUserShowMasterSeed(self, to): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "Show master seed not allowed") + message = "Your unique private key:" + message += "
    %s
    " % self.user.master_seed + message += "(Save it, you can access your account using this information)" + self.cmd("notification", ["info", message]) + + # Logout user + def actionUserLogout(self, to): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "Logout not allowed") + message = "You have been logged out. Login to another account" + self.cmd("notification", ["done", message, 1000000]) # 1000000 = Show ~forever :) + + script = "document.cookie = 'master_address=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/';" + script += "$('#button_notification').on('click', function() { zeroframe.cmd(\"userLoginForm\", []); });" + self.cmd("injectScript", script) + # Delete from user_manager + user_manager = UserManager.user_manager + if self.user.master_address in user_manager.users: + if not config.multiuser_local: + del user_manager.users[self.user.master_address] + self.response(to, "Successful logout") + else: + self.response(to, "User not found") + + # Show login form + def actionUserLoginForm(self, to): + self.cmd("prompt", ["Login
    Your private key:", "password", "Login"], self.responseUserLogin) + + # Login form submit + def responseUserLogin(self, master_seed): + user_manager = UserManager.user_manager + user = user_manager.get(CryptBitcoin.privatekeyToAddress(master_seed)) + if not user: + user = user_manager.create(master_seed=master_seed) + if user.master_address: + script = "document.cookie = 'master_address=%s;path=/;max-age=2592000;';" % user.master_address + script += "zeroframe.cmd('wrapperReload', ['login=done']);" + self.cmd("notification", ["done", "Successful login, reloading page..."]) + self.cmd("injectScript", script) + else: + self.cmd("notification", ["error", "Error: Invalid master seed"]) + self.actionUserLoginForm(0) + + def hasCmdPermission(self, cmd): + cmd = cmd[0].lower() + cmd[1:] + if not config.multiuser_local and self.user.master_address not in local_master_addresses and cmd in self.multiuser_denied_cmds: + self.cmd("notification", ["info", "This function is disabled on this proxy!"]) + return False + else: + return super(UiWebsocketPlugin, self).hasCmdPermission(cmd) + + def actionCertAdd(self, *args, **kwargs): + super(UiWebsocketPlugin, self).actionCertAdd(*args, **kwargs) + master_seed = self.user.master_seed + message = """ + + Hello, welcome to ZeroProxy!
    A new, unique account created for you:
    + + +
    + This is your private key, save it, so you can login next time.
    + Warning: Without this key, your account will be lost forever! +

    + Ok, Saved it!

    + This site allows you to browse ZeroNet content, but if you want to secure your account
    + and help to keep the network alive, then please run your own ZeroNet client.
    + """ + + self.cmd("notification", ["info", message]) + + script = """ + $("#button_notification_masterseed").on("click", function() { + this.value = "{master_seed}"; this.setSelectionRange(0,100); + }) + $("#button_notification_download").on("mousedown", function() { + this.href = window.URL.createObjectURL(new Blob(["ZeroNet user master seed:\\r\\n{master_seed}"])) + }) + """.replace("{master_seed}", master_seed) + self.cmd("injectScript", script) + + + def actionPermissionAdd(self, to, permission): + if permission == "NOSANDBOX": + self.cmd("notification", ["info", "You can't disable sandbox on this proxy!"]) + self.response(to, {"error": "Denied by proxy"}) + return False + else: + return super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission) + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("Multiuser plugin") + group.add_argument('--multiuser_local', help="Enable unsafe Ui functions and write users to disk", action='store_true') + group.add_argument('--multiuser_no_new_sites', help="Denies adding new sites by normal users", action='store_true') + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/disabled-Multiuser/Test/TestMultiuser.py b/plugins/disabled-Multiuser/Test/TestMultiuser.py new file mode 100644 index 00000000..b8ff4267 --- /dev/null +++ b/plugins/disabled-Multiuser/Test/TestMultiuser.py @@ -0,0 +1,14 @@ +import pytest +import json +from Config import config +from User import UserManager + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestMultiuser: + def testMemorySave(self, user): + # It should not write users to disk + users_before = open("%s/users.json" % config.data_dir).read() + user = UserManager.user_manager.create() + user.save() + assert open("%s/users.json" % config.data_dir).read() == users_before diff --git a/plugins/disabled-Multiuser/Test/conftest.py b/plugins/disabled-Multiuser/Test/conftest.py new file mode 100644 index 00000000..634e66e2 --- /dev/null +++ b/plugins/disabled-Multiuser/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * diff --git a/plugins/disabled-Multiuser/Test/pytest.ini b/plugins/disabled-Multiuser/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/disabled-Multiuser/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. 
\ No newline at end of file diff --git a/plugins/disabled-Multiuser/UserPlugin.py b/plugins/disabled-Multiuser/UserPlugin.py new file mode 100644 index 00000000..3c9ebae8 --- /dev/null +++ b/plugins/disabled-Multiuser/UserPlugin.py @@ -0,0 +1,35 @@ +from Config import config +from Plugin import PluginManager + +allow_reload = False + +@PluginManager.registerTo("UserManager") +class UserManagerPlugin(object): + def load(self): + if not config.multiuser_local: + # In multiuser mode do not load the users + if not self.users: + self.users = {} + return self.users + else: + return super(UserManagerPlugin, self).load() + + # Find user by master address + # Return: User or None + def get(self, master_address=None): + users = self.list() + if master_address in users: + user = users[master_address] + else: + user = None + return user + + +@PluginManager.registerTo("User") +class UserPlugin(object): + # In multiuser mode users data only exits in memory, dont write to data/user.json + def save(self): + if not config.multiuser_local: + return False + else: + return super(UserPlugin, self).save() diff --git a/plugins/disabled-Multiuser/__init__.py b/plugins/disabled-Multiuser/__init__.py new file mode 100644 index 00000000..c56ddf84 --- /dev/null +++ b/plugins/disabled-Multiuser/__init__.py @@ -0,0 +1 @@ +from . import MultiuserPlugin diff --git a/plugins/disabled-StemPort/StemPortPlugin.py b/plugins/disabled-StemPort/StemPortPlugin.py new file mode 100644 index 00000000..c53d38e6 --- /dev/null +++ b/plugins/disabled-StemPort/StemPortPlugin.py @@ -0,0 +1,135 @@ +import logging +import traceback + +import socket +import stem +from stem import Signal +from stem.control import Controller +from stem.socket import ControlPort + +from Plugin import PluginManager +from Config import config +from Debug import Debug + +if config.tor != "disable": + from gevent import monkey + monkey.patch_time() + monkey.patch_socket(dns=False) + monkey.patch_thread() + print("Stem Port Plugin: modules are patched.") +else: + print("Stem Port Plugin: Tor mode disabled. Module patching skipped.") + + +class PatchedControlPort(ControlPort): + def _make_socket(self): + try: + if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one + control_socket = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) + else: + control_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + # TODO: repeated code - consider making a separate method + + control_socket.connect((self._control_addr, self._control_port)) + return control_socket + except socket.error as exc: + raise stem.SocketError(exc) + +def from_port(address = '127.0.0.1', port = 'default'): + import stem.connection + + if not stem.util.connection.is_valid_ipv4_address(address): + raise ValueError('Invalid IP address: %s' % address) + elif port != 'default' and not stem.util.connection.is_valid_port(port): + raise ValueError('Invalid port: %s' % port) + + if port == 'default': + raise ValueError('Must specify a port') + else: + control_port = PatchedControlPort(address, port) + + return Controller(control_port) + + +@PluginManager.registerTo("TorManager") +class TorManagerPlugin(object): + + def connectController(self): + self.log.info("Authenticate using Stem... 
%s:%s" % (self.ip, self.port)) + + try: + with self.lock: + if config.tor_password: + controller = from_port(port=self.port, password=config.tor_password) + else: + controller = from_port(port=self.port) + controller.authenticate() + self.controller = controller + self.status = "Connected (via Stem)" + except Exception as err: + print("\n") + traceback.print_exc() + print("\n") + + self.controller = None + self.status = "Error (%s)" % err + self.log.error("Tor stem connect error: %s" % Debug.formatException(err)) + + return self.controller + + + def disconnect(self): + self.controller.close() + self.controller = None + + + def resetCircuits(self): + try: + self.controller.signal(Signal.NEWNYM) + except Exception as err: + self.status = "Stem reset circuits error (%s)" % err + self.log.error("Stem reset circuits error: %s" % err) + + + def makeOnionAndKey(self): + try: + service = self.controller.create_ephemeral_hidden_service( + {self.fileserver_port: self.fileserver_port}, + await_publication = False + ) + if service.private_key_type != "RSA1024": + raise Exception("ZeroNet doesn't support crypto " + service.private_key_type) + + self.log.debug("Stem created %s.onion (async descriptor publication)" % service.service_id) + + return (service.service_id, service.private_key) + + except Exception as err: + self.status = "AddOnion error (Stem: %s)" % err + self.log.error("Failed to create hidden service with Stem: " + err) + return False + + + def delOnion(self, address): + try: + self.controller.remove_ephemeral_hidden_service(address) + return True + except Exception as err: + self.status = "DelOnion error (Stem: %s)" % err + self.log.error("Stem failed to delete %s.onion: %s" % (address, err)) + self.disconnect() # Why? + return False + + + def request(self, cmd): + with self.lock: + if not self.enabled: + return False + else: + self.log.error("[WARNING] StemPort self.request should not be called") + return "" + + def send(self, cmd, conn=None): + self.log.error("[WARNING] StemPort self.send should not be called") + return "" diff --git a/plugins/disabled-StemPort/__init__.py b/plugins/disabled-StemPort/__init__.py new file mode 100644 index 00000000..33f8e034 --- /dev/null +++ b/plugins/disabled-StemPort/__init__.py @@ -0,0 +1,10 @@ +try: + from stem.control import Controller + stem_found = True +except Exception as err: + print(("STEM NOT FOUND! %s" % err)) + stem_found = False + +if stem_found: + print("Starting Stem plugin...") + from . 
import StemPortPlugin diff --git a/plugins/disabled-UiPassword/UiPasswordPlugin.py b/plugins/disabled-UiPassword/UiPasswordPlugin.py new file mode 100644 index 00000000..944804d7 --- /dev/null +++ b/plugins/disabled-UiPassword/UiPasswordPlugin.py @@ -0,0 +1,137 @@ +import string +import random +import time +import json +import re + + +from Config import config +from Plugin import PluginManager +from util import helper + +if "sessions" not in locals().keys(): # To keep sessions between module reloads + sessions = {} + + +def showPasswordAdvice(password): + error_msgs = [] + if not password or not isinstance(password, str): + error_msgs.append("You have enabled UiPassword plugin, but you forgot to set a password!") + elif len(password) < 8: + error_msgs.append("You are using a very short UI password!") + return error_msgs + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + sessions = sessions + last_cleanup = time.time() + + def route(self, path): + # Restict Ui access by ip + if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: + return self.error403(details=False) + if path.endswith("favicon.ico"): + return self.actionFile("src/Ui/media/img/favicon.ico") + else: + if config.ui_password: + if time.time() - self.last_cleanup > 60 * 60: # Cleanup expired sessions every hour + self.cleanup() + # Validate session + session_id = self.getCookies().get("session_id") + if session_id not in self.sessions: # Invalid session id, display login + return self.actionLogin() + return super(UiRequestPlugin, self).route(path) + + # Action: Login + @helper.encodeResponse + def actionLogin(self): + template = open("plugins/UiPassword/login.html").read() + self.sendHeader() + posted = self.getPosted() + if posted: # Validate http posted data + if self.checkPassword(posted.get("password")): + # Valid password, create session + session_id = self.randomString(26) + self.sessions[session_id] = { + "added": time.time(), + "keep": posted.get("keep") + } + + # Redirect to homepage or referer + url = self.env.get("HTTP_REFERER", "") + if not url or re.sub(r"\?.*", "", url).endswith("/Login"): + url = "/" + config.homepage + cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id) # Max age = 30 days + self.start_response('301 Redirect', [('Location', url), cookie_header]) + yield "Redirecting..." + + else: + # Invalid password, show login form again + template = template.replace("{result}", "bad_password") + yield template + + def checkPassword(self, password): + return password == config.ui_password + + def randomString(self, nchars): + return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(nchars)) + + @classmethod + def cleanup(cls): + cls.last_cleanup = time.time() + for session_id, session in list(cls.sessions.items()): + if session["keep"] and time.time() - session["added"] > 60 * 60 * 24 * 60: # Max 60days for keep sessions + del(cls.sessions[session_id]) + elif not session["keep"] and time.time() - session["added"] > 60 * 60 * 24: # Max 24h for non-keep sessions + del(cls.sessions[session_id]) + + # Action: Display sessions + def actionSessions(self): + self.sendHeader() + yield "
    "
    +        yield json.dumps(self.sessions, indent=4)
    +
    +    # Action: Logout
    +    def actionLogout(self):
+        # Session id has to be passed as a GET parameter, or the request must have no referer, to avoid remote logout
    +        session_id = self.getCookies().get("session_id")
    +        if not self.env.get("HTTP_REFERER") or session_id == self.get.get("session_id"):
    +            if session_id in self.sessions:
    +                del self.sessions[session_id]
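+            # Redirect to the homepage and clear the session cookie by expiring it in the past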
    +            self.start_response('301 Redirect', [
    +                ('Location', "/"),
    +                ('Set-Cookie', "session_id=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT")
    +            ])
    +            yield "Redirecting..."
    +        else:
    +            self.sendHeader()
    +            yield "Error: Invalid session id"
    +
    +
    +
    +@PluginManager.registerTo("ConfigPlugin")
    +class ConfigPlugin(object):
    +    def createArguments(self):
    +        group = self.parser.add_argument_group("UiPassword plugin")
    +        group.add_argument('--ui_password', help='Password to access UiServer', default=None, metavar="password")
    +
    +        return super(ConfigPlugin, self).createArguments()
    +
    +
    +from Translate import translate as lang
    +@PluginManager.registerTo("UiWebsocket")
    +class UiWebsocketPlugin(object):
    +    def actionUiLogout(self, to):
    +        permissions = self.getPermissions(to)
    +        if "ADMIN" not in permissions:
    +            return self.response(to, "You don't have permission to run this command")
    +
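+        # Forward the HTTP session id so the /Logout endpoint (UiRequestPlugin.actionLogout) can match and remove it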
    +        session_id = self.request.getCookies().get("session_id", "")
    +        self.cmd("redirect", '/Logout?session_id=%s' % session_id)
    +
    +    def addHomepageNotifications(self):
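+        # Show password advice (missing or too short ui_password) as error notifications on the homepage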
    +        error_msgs = showPasswordAdvice(config.ui_password)
    +        for msg in error_msgs:
    +            self.site.notifications.append(["error", lang[msg]])
    +
    +        return super(UiWebsocketPlugin, self).addHomepageNotifications()
    diff --git a/plugins/disabled-UiPassword/__init__.py b/plugins/disabled-UiPassword/__init__.py
    new file mode 100644
    index 00000000..1779c597
    --- /dev/null
    +++ b/plugins/disabled-UiPassword/__init__.py
    @@ -0,0 +1 @@
    +from . import UiPasswordPlugin
    \ No newline at end of file
    diff --git a/plugins/disabled-UiPassword/login.html b/plugins/disabled-UiPassword/login.html
    new file mode 100644
    index 00000000..12d0889d
    --- /dev/null
    +++ b/plugins/disabled-UiPassword/login.html
    @@ -0,0 +1,116 @@
    +
    +
    + Log In
    + 
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    diff --git a/plugins/disabled-ZeronameLocal/SiteManagerPlugin.py b/plugins/disabled-ZeronameLocal/SiteManagerPlugin.py
    new file mode 100644
    index 00000000..579e31c1
    --- /dev/null
    +++ b/plugins/disabled-ZeronameLocal/SiteManagerPlugin.py
    @@ -0,0 +1,180 @@
    +import logging, json, os, re, sys, time, socket
    +from Plugin import PluginManager
    +from Config import config
    +from Debug import Debug
    +from http.client import HTTPSConnection, HTTPConnection, HTTPException
    +from base64 import b64encode
    +
    +allow_reload = False # No reload supported
    +
    +@PluginManager.registerTo("SiteManager")
    +class SiteManagerPlugin(object):
    +    def load(self, *args, **kwargs):
    +        super(SiteManagerPlugin, self).load(*args, **kwargs)
    +        self.log = logging.getLogger("ZeronetLocal Plugin")
    +        self.error_message = None
    +        if not config.namecoin_host or not config.namecoin_rpcport or not config.namecoin_rpcuser or not config.namecoin_rpcpassword:
    +            self.error_message = "Missing parameters"
    +            self.log.error("Missing parameters to connect to namecoin node. Please check all the arguments needed with '--help'. Zeronet will continue working without it.")
    +            return
    +
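+        # Build the connection and HTTP basic-auth header for the local Namecoin JSON-RPC endpoint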
    +        url = "%(host)s:%(port)s" % {"host": config.namecoin_host, "port": config.namecoin_rpcport}
    +        self.c = HTTPConnection(url, timeout=3)
    +        user_pass = "%(user)s:%(password)s" % {"user": config.namecoin_rpcuser, "password": config.namecoin_rpcpassword}
    +        userAndPass = b64encode(bytes(user_pass, "utf-8")).decode("ascii")
    +        self.headers = {"Authorization" : "Basic %s" %  userAndPass, "Content-Type": " application/json " }
    +
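+        # Send a JSON-RPC "ping" once at load time to verify the node is reachable with the given credentials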
    +        payload = json.dumps({
    +            "jsonrpc": "2.0",
    +            "id": "zeronet",
    +            "method": "ping",
    +            "params": []
    +        })
    +
    +        try:
    +            self.c.request("POST", "/", payload, headers=self.headers)
    +            response = self.c.getresponse()
    +            data = response.read()
    +            self.c.close()
    +            if response.status == 200:
    +                result = json.loads(data.decode())["result"]
    +            else:
    +                raise Exception(response.reason)
    +        except Exception as err:
    +            self.log.error("The Namecoin node is unreachable. Please check the configuration value are correct. Zeronet will continue working without it.")
    +            self.error_message = err
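+        # In-memory cache of resolved names: {domain: {"addresses_resolved": {subdomain: address}, "time": timestamp}}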
    +        self.cache = dict()
    +
    +    # Checks if it's a valid address
    +    def isAddress(self, address):
    +        return self.isBitDomain(address) or super(SiteManagerPlugin, self).isAddress(address)
    +
+    # Return: True if the address is a domain
    +    def isDomain(self, address):
    +        return self.isBitDomain(address) or super(SiteManagerPlugin, self).isDomain(address)
    +
+    # Return: True if the address is a .bit domain
    +    def isBitDomain(self, address):
    +        return re.match(r"(.*?)([A-Za-z0-9_-]+\.bit)$", address)
    +
    +    # Return: Site object or None if not found
    +    def get(self, address):
+        if self.isBitDomain(address):  # It looks like a domain
    +            address_resolved = self.resolveDomain(address)
    +            if address_resolved:  # Domain found
    +                site = self.sites.get(address_resolved)
    +                if site:
    +                    site_domain = site.settings.get("domain")
    +                    if site_domain != address:
    +                        site.settings["domain"] = address
    +            else:  # Domain not found
    +                site = self.sites.get(address)
    +
    +        else:  # Access by site address
    +            site = super(SiteManagerPlugin, self).get(address)
    +        return site
    +
    +    # Return or create site and start download site files
    +    # Return: Site or None if dns resolve failed
    +    def need(self, address, *args, **kwargs):
+        if self.isBitDomain(address):  # It looks like a domain
    +            address_resolved = self.resolveDomain(address)
    +            if address_resolved:
    +                address = address_resolved
    +            else:
    +                return None
    +
    +        return super(SiteManagerPlugin, self).need(address, *args, **kwargs)
    +
    +    # Resolve domain
    +    # Return: The address or None
    +    def resolveDomain(self, domain):
    +        domain = domain.lower()
    +
+        # Remove the trailing ".bit"
    +        if domain[-4:] == ".bit":
    +            domain = domain[0:-4]
    +
    +        domain_array = domain.split(".")
    +
    +        if self.error_message:
    +            self.log.error("Not able to connect to Namecoin node : {!s}".format(self.error_message))
    +            return None
    +
    +        if len(domain_array) > 2:
    +            self.log.error("Too many subdomains! Can only handle one level (eg. staging.mixtape.bit)")
    +            return None
    +
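+        # Split "sub.domain" into subdomain and domain; a bare domain uses "" as the subdomain key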
    +        subdomain = ""
    +        if len(domain_array) == 1:
    +            domain = domain_array[0]
    +        else:
    +            subdomain = domain_array[0]
    +            domain = domain_array[1]
    +
    +        if domain in self.cache:
    +            delta = time.time() - self.cache[domain]["time"]
    +            if delta < 3600:
+                # Cache entry is less than 1 hour old
    +                return self.cache[domain]["addresses_resolved"][subdomain]
    +
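+        # Look up the "d/<domain>" name record via the Namecoin name_show RPC method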
    +        payload = json.dumps({
    +            "jsonrpc": "2.0",
    +            "id": "zeronet",
    +            "method": "name_show",
    +            "params": ["d/"+domain]
    +        })
    +
    +        try:
    +            self.c.request("POST", "/", payload, headers=self.headers)
    +            response = self.c.getresponse()
    +            data = response.read()
    +            self.c.close()
    +            domain_object = json.loads(data.decode())["result"]
    +        except Exception as err:
+            # Domain doesn't exist (or the RPC call failed)
    +            return None
    +
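+        # The name's JSON value can hold a single address under "zeronet" or per-subdomain entries under "map"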
    +        if "zeronet" in domain_object["value"]:
    +            zeronet_domains = json.loads(domain_object["value"])["zeronet"]
    +
    +            if isinstance(zeronet_domains, str):
    +                # {
    +                #    "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9"
    +                # } is valid
    +                zeronet_domains = {"": zeronet_domains}
    +
    +            self.cache[domain] = {"addresses_resolved": zeronet_domains, "time": time.time()}
    +
    +        elif "map" in domain_object["value"]:
+            # The Namecoin standard uses {"map": {"blog": {"zeronet": "1D..."}}}
    +            data_map = json.loads(domain_object["value"])["map"]
    +
    +            zeronet_domains = dict()
    +            for subdomain in data_map:
    +                if "zeronet" in data_map[subdomain]:
    +                    zeronet_domains[subdomain] = data_map[subdomain]["zeronet"]
    +            if "zeronet" in data_map and isinstance(data_map["zeronet"], str):
    +                # {"map":{
    +                #    "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9",
    +                # }}
    +                zeronet_domains[""] = data_map["zeronet"]
    +
    +            self.cache[domain] = {"addresses_resolved": zeronet_domains, "time": time.time()}
    +
    +        else:
    +            # No Zeronet address registered
    +            return None
    +
    +        return self.cache[domain]["addresses_resolved"][subdomain]
    +
    +@PluginManager.registerTo("ConfigPlugin")
    +class ConfigPlugin(object):
    +    def createArguments(self):
    +        group = self.parser.add_argument_group("Zeroname Local plugin")
    +        group.add_argument('--namecoin_host', help="Host to namecoin node (eg. 127.0.0.1)")
    +        group.add_argument('--namecoin_rpcport', help="Port to connect (eg. 8336)")
    +        group.add_argument('--namecoin_rpcuser', help="RPC user to connect to the namecoin node (eg. nofish)")
    +        group.add_argument('--namecoin_rpcpassword', help="RPC password to connect to namecoin node")
    +
    +        return super(ConfigPlugin, self).createArguments()
    diff --git a/plugins/disabled-ZeronameLocal/UiRequestPlugin.py b/plugins/disabled-ZeronameLocal/UiRequestPlugin.py
    new file mode 100644
    index 00000000..0ccfb530
    --- /dev/null
    +++ b/plugins/disabled-ZeronameLocal/UiRequestPlugin.py
    @@ -0,0 +1,39 @@
    +import re
    +from Plugin import PluginManager
    +
    +@PluginManager.registerTo("UiRequest")
    +class UiRequestPlugin(object):
    +    def __init__(self, *args, **kwargs):
    +        from Site import SiteManager
    +        self.site_manager = SiteManager.site_manager
    +        super(UiRequestPlugin, self).__init__(*args, **kwargs)
    +
    +
    +    # Media request
    +    def actionSiteMedia(self, path):
    +        match = re.match(r"/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) + if match: # Its a valid domain, resolve first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/"+address+match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output + + + # Is mediarequest allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + referer_path = re.sub(r"\?.*", "", referer_path) # Remove http params + + if self.isProxyRequest(): # Match to site domain + referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access + referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1) + else: # Match to request path + referer_site_address = re.match(r"/(?P
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") + + if referer_site_address == site_address: # Referer site address as simple address + return True + elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns + return True + else: # Invalid referer + return False diff --git a/plugins/disabled-ZeronameLocal/__init__.py b/plugins/disabled-ZeronameLocal/__init__.py new file mode 100644 index 00000000..cf724069 --- /dev/null +++ b/plugins/disabled-ZeronameLocal/__init__.py @@ -0,0 +1,2 @@ +from . import UiRequestPlugin +from . import SiteManagerPlugin \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 538a6dfc..1173d695 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,13 +1,14 @@ -gevent==1.4.0; python_version <= "3.6" -greenlet==0.4.16; python_version <= "3.6" -gevent>=20.9.0; python_version >= "3.7" +gevent>=1.1.0 msgpack>=0.4.4 base58 -merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev +merkletools +pyelliptic==1.5.6 rsa -PySocks>=1.6.8 +PySocks pyasn1 websocket_client -gevent-ws +gevent-websocket +bencode.py coincurve +python-bitcoinlib maxminddb diff --git a/src/Config.py b/src/Config.py index a9208d55..8791e365 100644 --- a/src/Config.py +++ b/src/Config.py @@ -7,34 +7,27 @@ import configparser import logging import logging.handlers import stat -import time class Config(object): def __init__(self, argv): - self.version = "0.9.0" - self.rev = 4630 + self.version = "0.7.0" + self.rev = 4106 self.argv = argv self.action = None - self.test_parser = None self.pending_changes = {} self.need_restart = False self.keys_api_change_allowed = set([ "tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers", - "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline", - "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" - ]) - self.keys_restart_need = set([ - "tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" + "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline" ]) + self.keys_restart_need = set(["tor", "fileserver_port", "fileserver_ip_type"]) self.start_dir = self.getStartDir() self.config_file = self.start_dir + "/zeronet.conf" self.data_dir = self.start_dir + "/data" self.log_dir = self.start_dir + "/log" - self.openssl_lib_file = None - self.openssl_bin_file = None self.trackers_file = False self.createParser() @@ -56,9 +49,7 @@ class Config(object): def getStartDir(self): this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd") - if "--start_dir" in self.argv: - start_dir = self.argv[self.argv.index("--start_dir") + 1] - elif this_file.endswith("/Contents/Resources/core/src/Config.py"): + if this_file.endswith("/Contents/Resources/core/src/Config.py"): # Running as ZeroNet.app if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")): # Runnig from non-writeable directory, put data to Application Support @@ -79,15 +70,16 @@ class Config(object): # Create command line arguments def createArguments(self): - from Crypt import CryptHash - access_key_default = CryptHash.random(24, "base64") # Used to allow restrited plugins when multiuser plugin is enabled trackers = [ + "zero://boot3rdez4rzn36x.onion:15441", + "zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443", # US/NY 
+ "udp://tracker.coppersurfer.tk:6969", # DE + "udp://tracker.port443.xyz:6969", # UK + "udp://104.238.198.186:8000", # US/LA + "http://tracker2.itzmx.com:6961/announce", # US/LA "http://open.acgnxtracker.com:80/announce", # DE - "http://tracker.bt4g.com:2095/announce", # Cloudflare - "http://tracker.files.fm:6969/announce", - "http://t.publictracker.xyz:6969/announce", - "https://tracker.lilithraws.cf:443/announce", - "https://tracker.babico.name.tr:443/announce", + "http://open.trackerlist.xyz:80/announce", # Cloudflare + "zero://2602:ffc5::c5b2:5360:26312" # US/ATL ] # Platform specific if sys.platform.startswith("win"): @@ -121,8 +113,6 @@ class Config(object): # SiteCreate action = self.subparsers.add_parser("siteCreate", help='Create a new site') - action.register('type', 'bool', self.strToBool) - action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True) # SiteNeedFile action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site') @@ -211,27 +201,19 @@ class Config(object): action = self.subparsers.add_parser("testConnection", help='Testing') action = self.subparsers.add_parser("testAnnounce", help='Testing') - self.test_parser = self.subparsers.add_parser("test", help='Run a test') - self.test_parser.add_argument('test_name', help='Test name', nargs="?") - # self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true') - # Config parameters self.parser.add_argument('--verbose', help='More detailed logging', action='store_true') self.parser.add_argument('--debug', help='Debug mode', action='store_true') - self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true') + self.parser.add_argument('--silent', help='Disable logging to terminal output', action='store_true') self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') - self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true') self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true') - self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path") self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path") self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path") - self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"]) - self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path") - self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"]) + self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR"]) self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"]) self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int) @@ -244,14 +226,13 @@ class Config(object): self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', 
const="default_browser", metavar='browser_name') - self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d', + self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D', metavar='address') - self.parser.add_argument('--updatesite', help='Source code update site', default='1Update8crprmciJHwp2WXqkx2c4iYp18', + self.parser.add_argument('--updatesite', help='Source code update site', default='1uPDaT3uSyWAPdCv1WkMb5hBQjWSNNACf', metavar='address') - self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key') self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source') - self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit') + self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit') self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit') self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit') self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit') @@ -269,12 +250,10 @@ class Config(object): self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port') self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip') self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*') - self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*') + self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path') self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable") self.parser.add_argument('--use_libsecp256k1', help='Use Libsecp256k1 liblary for speedup', type='bool', choices=[True, False], default=True) self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=True) - self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path") - self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path") self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true') self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true') self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true') @@ -293,12 +272,6 @@ class Config(object): self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification', type='bool', choices=[True, False], default=fix_float_decimals) self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed") - - self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int) - 
self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int) - self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int) - self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int) - self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual") self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, @@ -319,27 +292,24 @@ class Config(object): def loadTrackersFile(self): if not self.trackers_file: - self.trackers_file = ["trackers.txt", "{data_dir}/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt"] + return None + self.trackers = self.arguments.trackers[:] - for trackers_file in self.trackers_file: - try: - if trackers_file.startswith("/"): # Absolute - trackers_file_path = trackers_file - elif trackers_file.startswith("{data_dir}"): # Relative to data_dir - trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir) - else: # Relative to zeronet.py - trackers_file_path = self.start_dir + "/" + trackers_file + try: + if self.trackers_file.startswith("/"): # Absolute + trackers_file_path = self.trackers_file + elif self.trackers_file.startswith("{data_dir}"): # Relative to data_dir + trackers_file_path = self.trackers_file.replace("{data_dir}", self.data_dir) + else: # Relative to zeronet.py + trackers_file_path = self.start_dir + "/" + self.trackers_file - if not os.path.exists(trackers_file_path): - continue - - for line in open(trackers_file_path): - tracker = line.strip() - if "://" in tracker and tracker not in self.trackers: - self.trackers.append(tracker) - except Exception as err: - print("Error loading trackers file: %s" % err) + for line in open(trackers_file_path): + tracker = line.strip() + if "://" in tracker and tracker not in self.trackers: + self.trackers.append(tracker) + except Exception as err: + print("Error loading trackers file: %s" % err) # Find arguments specified for current action def getActionArguments(self): @@ -380,17 +350,8 @@ class Config(object): valid_parameters.append(arg) return valid_parameters + plugin_parameters - def getParser(self, argv): - action = self.getAction(argv) - if not action: - return self.parser - else: - return self.subparsers.choices[action] - # Parse arguments from config file and command line def parse(self, silent=False, parse_config=True): - argv = self.argv[:] # Copy command line arguments - current_parser = self.getParser(argv) if silent: # Don't display messages or quit on unknown parameter original_print_message = self.parser._print_message original_exit = self.parser.exit @@ -398,10 +359,11 @@ class Config(object): def silencer(parser, function_name): parser.exited = True return None - current_parser.exited = False - current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message") - current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit") + self.parser.exited = False + self.parser._print_message = lambda *args, **kwargs: silencer(self.parser, "_print_message") + self.parser.exit = lambda *args, **kwargs: silencer(self.parser, "exit") + argv = self.argv[:] # Copy command line arguments self.parseCommandline(argv, silent) # Parse argv self.setAttributes() if parse_config: @@ -415,10 +377,10 @@ class Config(object): self.ip_local.append(self.fileserver_ip) if silent: # Restore original functions - if 
current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action + if self.parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action self.action = None - current_parser._print_message = original_print_message - current_parser.exit = original_exit + self.parser._print_message = original_print_message + self.parser.exit = original_exit self.loadTrackersFile() @@ -447,7 +409,7 @@ class Config(object): self.config_file = argv[argv.index("--config_file") + 1] # Load config file if os.path.isfile(self.config_file): - config = configparser.RawConfigParser(allow_no_value=True, strict=False) + config = configparser.ConfigParser(allow_no_value=True) config.read(self.config_file) for section in config.sections(): for key, val in config.items(section): @@ -457,7 +419,7 @@ class Config(object): key = section + "_" + key if key == "open_browser": # Prefer config file value over cli argument - while "--%s" % key in argv: + if "--%s" % key in argv: pos = argv.index("--open_browser") del argv[pos:pos + 2] @@ -471,16 +433,6 @@ class Config(object): argv = argv[:1] + argv_extend + argv[1:] return argv - # Return command line value of given argument - def getCmdlineValue(self, key): - if key not in self.argv: - return None - argv_index = self.argv.index(key) - if argv_index == len(self.argv) - 1: # last arg, test not specified - return None - - return self.argv[argv_index + 1] - # Expose arguments as class attributes def setAttributes(self): # Set attributes from arguments @@ -489,9 +441,8 @@ class Config(object): for key, val in args.items(): if type(val) is list: val = val[:] - if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"): - if val: - val = val.replace("\\", "/") + if key in ("data_dir", "log_dir"): + val = val.replace("\\", "/") setattr(self, key, val) def loadPlugins(self): @@ -500,11 +451,7 @@ class Config(object): @PluginManager.acceptPlugins class ConfigPlugin(object): def __init__(self, config): - self.argv = config.argv self.parser = config.parser - self.subparsers = config.subparsers - self.test_parser = config.test_parser - self.getCmdlineValue = config.getCmdlineValue self.createArguments() def createArguments(self): @@ -525,7 +472,7 @@ class Config(object): for line in lines: if line.strip() == "[global]": global_line_i = i - if line.startswith(key + " =") or line == key: + if line.startswith(key + " ="): key_line_i = i i += 1 @@ -592,15 +539,12 @@ class Config(object): else: format = '%(name)s %(message)s' - if self.console_log_level == "default": - if self.silent: - level = logging.ERROR - elif self.debug: - level = logging.DEBUG - else: - level = logging.INFO + if self.silent: + level = logging.ERROR + elif self.debug: + level = logging.DEBUG else: - level = logging.getLevelName(self.console_log_level) + level = logging.INFO console_logger = logging.StreamHandler() console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S")) @@ -629,13 +573,7 @@ class Config(object): logging.getLogger('').setLevel(logging.getLevelName(self.log_level)) logging.getLogger('').addHandler(file_logger) - def initLogging(self, console_logging=None, file_logging=None): - if console_logging == None: - console_logging = self.console_log_level != "off" - - if file_logging == None: - file_logging = self.log_level != "off" - + def initLogging(self): # Create necessary files and dirs if not os.path.isdir(self.log_dir): os.mkdir(self.log_dir) @@ -649,27 +587,9 @@ class Config(object): 
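
# Sketch of the config-file handling in the hunk above: an INI file is flattened into
# argparse-style tokens ("--key value", with non-[global] sections prefixed as section_key)
# and spliced into argv right after the program name. The file name below is an example;
# multiline values and the open_browser special case are left out.
import configparser

def config_to_argv(config_path, argv):
    config = configparser.RawConfigParser(allow_no_value=True, strict=False)
    config.read(config_path)
    argv_extend = []
    for section in config.sections():
        for key, val in config.items(section):
            if section != "global":  # If not in global section, prefix key with section name
                key = section + "_" + key
            argv_extend.append("--%s" % key)
            if val is not None:
                argv_extend.append(val)
    return argv[:1] + argv_extend + argv[1:]  # Keep the program name first

# Example: config_to_argv("zeronet.conf", ["zeronet.py", "--debug"])
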
logging.addLevelName(15, "WARNING") logging.getLogger('').name = "-" # Remove root prefix + logging.getLogger("geventwebsocket.handler").setLevel(logging.WARNING) # Don't log ws debug messages - self.error_logger = ErrorLogHandler() - self.error_logger.setLevel(logging.getLevelName("ERROR")) - logging.getLogger('').addHandler(self.error_logger) - - if console_logging: - self.initConsoleLogger() - if file_logging: - self.initFileLogger() - - -class ErrorLogHandler(logging.StreamHandler): - def __init__(self): - self.lines = [] - return super().__init__() - - def emit(self, record): - self.lines.append([time.time(), record.levelname, self.format(record)]) - - def onNewRecord(self, record): - pass - + self.initConsoleLogger() + self.initFileLogger() config = Config(sys.argv) diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py index 22bcf29c..8122ec08 100644 --- a/src/Connection/Connection.py +++ b/src/Connection/Connection.py @@ -125,11 +125,11 @@ class Connection(object): self.sock = self.server.tor_manager.createSocket(self.ip, self.port) elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local: raise Exception("Can't connect to local IPs in Tor: always mode") - elif config.trackers_proxy != "disable" and config.tor != "always" and self.is_tracker_connection: + elif config.trackers_proxy != "disable" and self.is_tracker_connection: if config.trackers_proxy == "tor": self.sock = self.server.tor_manager.createSocket(self.ip, self.port) else: - import socks + from lib.PySocks import socks self.sock = socks.socksocket() proxy_ip, proxy_port = config.trackers_proxy.split(":") self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port)) diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py index c9048398..f81dc5f8 100644 --- a/src/Connection/ConnectionServer.py +++ b/src/Connection/ConnectionServer.py @@ -30,9 +30,7 @@ class ConnectionServer(object): port = 15441 self.ip = ip self.port = port - self.last_connection_id = 0 # Connection id incrementer - self.last_connection_id_current_version = 0 # Connection id incrementer for current client version - self.last_connection_id_supported_version = 0 # Connection id incrementer for last supported version + self.last_connection_id = 1 # Connection id incrementer self.log = logging.getLogger("ConnServer") self.port_opened = {} self.peer_blacklist = SiteManager.peer_blacklist @@ -48,8 +46,6 @@ class ConnectionServer(object): self.stream_server = None self.stream_server_proxy = None self.running = False - self.stopping = False - self.thread_checker = None self.stat_recv = defaultdict(lambda: defaultdict(int)) self.stat_sent = defaultdict(lambda: defaultdict(int)) @@ -80,8 +76,6 @@ class ConnectionServer(object): self.handleRequest = request_handler def start(self, check_connections=True): - if self.stopping: - return False self.running = True if check_connections: self.thread_checker = gevent.spawn(self.checkConnections) @@ -105,23 +99,17 @@ class ConnectionServer(object): def listen(self): if not self.running: - return None - + return False if self.stream_server_proxy: gevent.spawn(self.listenProxy) try: self.stream_server.serve_forever() except Exception as err: self.log.info("StreamServer listen error: %s" % err) - return False - self.log.debug("Stopped.") def stop(self): - self.log.debug("Stopping %s" % self.stream_server) - self.stopping = True + self.log.debug("Stopping") self.running = False - if self.thread_checker: - 
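
# Sketch of the tracker-over-SOCKS connection shown in the Connection.py hunk above:
# PySocks provides a drop-in replacement socket whose traffic is tunnelled through a
# SOCKS5 proxy. The proxy address below is only an example value for a trackers_proxy
# setting such as "ip:port".
import socks  # PySocks

def open_via_socks5(dest_ip, dest_port, proxy="127.0.0.1:9050"):
    proxy_ip, proxy_port = proxy.split(":")
    sock = socks.socksocket()  # Behaves like a regular socket.socket()
    sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
    sock.connect((dest_ip, dest_port))  # The connection goes through the proxy
    return sock
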
gevent.kill(self.thread_checker) if self.stream_server: self.stream_server.stop() @@ -157,11 +145,6 @@ class ConnectionServer(object): connection = Connection(self, ip, port, sock) self.connections.append(connection) - rev = connection.handshake.get("rev", 0) - if rev >= 4560: - self.last_connection_id_supported_version += 1 - if rev == config.rev: - self.last_connection_id_current_version += 1 if ip not in config.ip_local: self.ips[ip] = connection connection.handleIncomingConnection(sock) @@ -226,12 +209,6 @@ class ConnectionServer(object): if not succ: connection.close("Connection event return error") raise Exception("Connection event return error") - else: - rev = connection.handshake.get("rev", 0) - if rev >= 4560: - self.last_connection_id_supported_version += 1 - if rev == config.rev: - self.last_connection_id_current_version += 1 except Exception as err: connection.close("%s Connect error: %s" % (ip, Debug.formatException(err))) @@ -261,9 +238,9 @@ class ConnectionServer(object): def checkConnections(self): run_i = 0 - time.sleep(15) while self.running: run_i += 1 + time.sleep(15) # Check every minute self.ip_incoming = {} # Reset connected ips counter last_message_time = 0 s = time.time() @@ -340,8 +317,6 @@ class ConnectionServer(object): if time.time() - s > 0.01: self.log.debug("Connection cleanup in %.3fs" % (time.time() - s)) - - time.sleep(15) self.log.debug("Checkconnections ended") @util.Noparallel(blocking=False) @@ -379,7 +354,7 @@ class ConnectionServer(object): for connection in self.connections if connection.handshake.get("time") and connection.last_ping_delay ]) - if len(corrections) < 9: + if len(corrections) < 6: return 0.0 mid = int(len(corrections) / 2 - 1) median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3 diff --git a/src/Content/ContentDb.py b/src/Content/ContentDb.py index f284581e..b42a6e27 100644 --- a/src/Content/ContentDb.py +++ b/src/Content/ContentDb.py @@ -1,6 +1,7 @@ +import time import os -from Db.Db import Db, DbTableError +from Db.Db import Db from Config import config from Plugin import PluginManager from Debug import Debug @@ -11,14 +12,9 @@ class ContentDb(Db): def __init__(self, path): Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) self.foreign_keys = True - - def init(self): try: self.schema = self.getSchema() - try: - self.checkTables() - except DbTableError: - pass + self.checkTables() self.log.debug("Checking foreign keys...") foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone() if foreign_key_error: @@ -26,14 +22,11 @@ class ContentDb(Db): except Exception as err: self.log.error("Error loading content.db: %s, rebuilding..." 
% Debug.formatException(err)) self.close() - os.unlink(self.db_path) # Remove and try again - Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, self.db_path) + os.unlink(path) # Remove and try again + Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) self.foreign_keys = True self.schema = self.getSchema() - try: - self.checkTables() - except DbTableError: - pass + self.checkTables() self.site_ids = {} self.sites = {} @@ -156,7 +149,6 @@ def getContentDb(path=None): path = "%s/content.db" % config.data_dir if path not in content_dbs: content_dbs[path] = ContentDb(path) - content_dbs[path].init() return content_dbs[path] getContentDb() # Pre-connect to default one diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py index 623cc707..49d929c8 100644 --- a/src/Content/ContentManager.py +++ b/src/Content/ContentManager.py @@ -40,7 +40,7 @@ class ContentManager(object): # Load all content.json files def loadContents(self): if len(self.contents) == 0: - self.log.info("ContentDb not initialized, load files from filesystem...") + self.log.debug("ContentDb not initialized, load files from filesystem") self.loadContent(add_bad_files=False, delete_removed_files=False) self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() @@ -83,7 +83,7 @@ class ContentManager(object): for line in open(content_path): if '"modified"' not in line: continue - match = re.search(r"([0-9\.]+),$", line.strip(" \r\n")) + match = re.search("([0-9\.]+),$", line.strip(" \r\n")) if match and float(match.group(1)) <= old_content.get("modified", 0): self.log.debug("%s loadContent same json file, skipping" % content_inner_path) return [], [] @@ -352,7 +352,7 @@ class ContentManager(object): # Returns if file with the given modification date is archived or not def isArchived(self, inner_path, modified): - match = re.match(r"(.*)/(.*?)/", inner_path) + match = re.match("(.*)/(.*?)/", inner_path) if not match: return False user_contents_inner_path = match.group(1) + "/content.json" @@ -430,7 +430,7 @@ class ContentManager(object): back = content["user_contents"] content_inner_path_dir = helper.getDirname(content_inner_path) relative_content_path = inner_path[len(content_inner_path_dir):] - user_auth_address_match = re.match(r"([A-Za-z0-9]+)/.*", relative_content_path) + user_auth_address_match = re.match("([A-Za-z0-9]+)/.*", relative_content_path) if user_auth_address_match: user_auth_address = user_auth_address_match.group(1) back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address) @@ -496,9 +496,9 @@ class ContentManager(object): # Delivered for directory if "inner_path" in parent_content: parent_content_dir = helper.getDirname(parent_content["inner_path"]) - user_address = re.match(r"([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1) + user_address = re.match("([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1) else: - user_address = re.match(r".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) + user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) try: if not content: @@ -595,27 +595,20 @@ class ContentManager(object): return back def isValidRelativePath(self, relative_path): - if ".." in relative_path.replace("\\", "/").split("/"): + if ".." 
in relative_path: return False elif len(relative_path) > 255: return False - elif relative_path[0] in ("/", "\\"): # Starts with - return False - elif relative_path[-1] in (".", " "): # Ends with - return False - elif re.match(r".*(^|/)(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]|CONOUT\$|CONIN\$)(\.|/|$)", relative_path, re.IGNORECASE): # Protected on Windows - return False else: - return re.match(r"^[^\x00-\x1F\"*:<>?\\|]+$", relative_path) + return re.match("^[a-z\[\]\(\) A-Z0-9~_@=\.\+-/]+$", relative_path) def sanitizePath(self, inner_path): - return re.sub("[\x00-\x1F\"*:<>?\\|]", "", inner_path) + return re.sub("[^a-z\[\]\(\) A-Z0-9_@=\.\+-/]", "", inner_path) # Hash files in directory def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None): files_node = {} files_optional_node = {} - db_inner_path = self.site.storage.getDbFile() if dir_inner_path and not self.isValidRelativePath(dir_inner_path): ignored = True self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path) @@ -631,7 +624,7 @@ class ContentManager(object): elif not self.isValidRelativePath(file_relative_path): ignored = True self.log.error("- [ERROR] Invalid filename: %s" % file_relative_path) - elif dir_inner_path == "" and db_inner_path and file_relative_path.startswith(db_inner_path): + elif dir_inner_path == "" and self.site.storage.getDbFile() and file_relative_path.startswith(self.site.storage.getDbFile()): ignored = True elif optional_pattern and SafeRe.match(optional_pattern, file_relative_path): optional = True @@ -727,6 +720,7 @@ class ContentManager(object): elif "files_optional" in new_content: del new_content["files_optional"] + new_content["modified"] = int(time.time()) # Add timestamp if inner_path == "content.json": new_content["zeronet_version"] = config.version new_content["signs_required"] = content.get("signs_required", 1) @@ -746,11 +740,9 @@ class ContentManager(object): ) self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers)) - signs_required = 1 if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key, then sign the valid signers - signs_required = new_content["signs_required"] - signers_data = "%s:%s" % (signs_required, ",".join(valid_signers)) + signers_data = "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)) new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey) if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none") @@ -758,32 +750,15 @@ class ContentManager(object): self.log.info("Signing %s..." 
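
# Sketch of the stricter relative-path validation in the removed lines above: reject
# traversal ("..") as a path segment, absolute paths, names ending in a dot or space,
# Windows reserved device names, and control/special characters. Standalone function,
# not the ContentManager method itself.
import re

def is_valid_relative_path(relative_path):
    if not relative_path or len(relative_path) > 255:
        return False  # Empty or too long
    if ".." in relative_path.replace("\\", "/").split("/"):
        return False  # Directory traversal
    if relative_path[0] in ("/", "\\"):
        return False  # Starts with a separator (absolute-ish path)
    if relative_path[-1] in (".", " "):
        return False  # Invalid ending on Windows
    if re.match(r".*(^|/)(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]|CONOUT\$|CONIN\$)(\.|/|$)", relative_path, re.IGNORECASE):
        return False  # Protected device name on Windows
    return bool(re.match(r"^[^\x00-\x1F\"*:<>?\\|]+$", relative_path))

# is_valid_relative_path("css/all.css") -> True; is_valid_relative_path("../key") -> False
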
% inner_path) if "signs" in new_content: - # del(new_content["signs"]) # Delete old signs - old_signs_content = new_content["signs"] - del(new_content["signs"]) - else: - old_signs_content = None + del(new_content["signs"]) # Delete old signs if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility) - if signs_required > 1: - has_valid_sign = False - sign_content = json.dumps(new_content, sort_keys=True) - for signer in valid_signers: - res = CryptBitcoin.verify(sign_content,signer,old_signs_content[signer]); - print(res) - if res: - has_valid_sign = has_valid_sign or res - if has_valid_sign: - new_content["modified"] = content["modified"] - sign_content = json.dumps(new_content, sort_keys=True) - else: - new_content["modified"] = int(time.time()) # Add timestamp - sign_content = json.dumps(new_content, sort_keys=True) + sign_content = json.dumps(new_content, sort_keys=True) sign = CryptBitcoin.sign(sign_content, privatekey) # new_content["signs"] = content.get("signs", {}) # TODO: Multisig if sign: # If signing is successful (not an old address) - new_content["signs"] = old_signs_content or {} + new_content["signs"] = {} new_content["signs"][privatekey_address] = sign self.verifyContent(inner_path, new_content) @@ -818,16 +793,11 @@ class ContentManager(object): # Return: The required number of valid signs for the content.json def getSignsRequired(self, inner_path, content=None): - if not content: - return 1 - return content.get("signs_required", 1) - - def verifyCertSign(self, user_address, user_auth_type, user_name, issuer_address, sign): - from Crypt import CryptBitcoin - cert_subject = "%s#%s/%s" % (user_address, user_auth_type, user_name) - return CryptBitcoin.verify(cert_subject, issuer_address, sign) + return 1 # Todo: Multisig def verifyCert(self, inner_path, content): + from Crypt import CryptBitcoin + rules = self.getRules(inner_path, content) if not rules: @@ -850,7 +820,12 @@ class ContentManager(object): else: raise VerifyError("Invalid cert signer: %s" % domain) - return self.verifyCertSign(rules["user_address"], content["cert_auth_type"], name, cert_address, content["cert_sign"]) + try: + cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name) + result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"]) + except Exception as err: + raise VerifyError("Certificate verify error: %s" % err) + return result # Checks if the content.json content is valid # Return: True or False @@ -883,16 +858,15 @@ class ContentManager(object): if content.get("inner_path") and content["inner_path"] != inner_path: raise VerifyError("Wrong inner_path: %s" % content["inner_path"]) - # If our content.json file bigger than the size limit throw error - if inner_path == "content.json": - content_size_file = len(json.dumps(content, indent=1)) - if content_size_file > site_size_limit: - # Save site size to display warning + # Check total site size limit + if site_size > site_size_limit: + if inner_path == "content.json" and self.site.settings["size"] == 0: + # First content.json download, save site size to display warning self.site.settings["size"] = site_size - task = self.site.worker_manager.tasks.findTask(inner_path) - if task: # Dont try to download from other peers - self.site.worker_manager.failTask(task) - raise VerifyError("Content too large %s B > %s B, aborting task..." 
% (site_size, site_size_limit)) + task = self.site.worker_manager.findTask(inner_path) + if task: # Dont try to download from other peers + self.site.worker_manager.failTask(task) + raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit)) # Verify valid filenames for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): @@ -909,7 +883,7 @@ class ContentManager(object): self.site.settings["size_optional"] = site_size_optional return True else: - raise VerifyError("Content verify error") + return False def verifyContentInclude(self, inner_path, content, content_size, content_size_optional): # Load include details @@ -931,12 +905,12 @@ class ContentManager(object): # Filename limit if rules.get("files_allowed"): for file_inner_path in list(content["files"].keys()): - if not SafeRe.match(r"^%s$" % rules["files_allowed"], file_inner_path): + if not SafeRe.match("^%s$" % rules["files_allowed"], file_inner_path): raise VerifyError("File not allowed: %s" % file_inner_path) if rules.get("files_allowed_optional"): for file_inner_path in list(content.get("files_optional", {}).keys()): - if not SafeRe.match(r"^%s$" % rules["files_allowed_optional"], file_inner_path): + if not SafeRe.match("^%s$" % rules["files_allowed_optional"], file_inner_path): raise VerifyError("Optional file not allowed: %s" % file_inner_path) # Check if content includes allowed @@ -954,13 +928,10 @@ class ContentManager(object): if type(file) is dict: new_content = file else: - try: - if sys.version_info.major == 3 and sys.version_info.minor < 6: - new_content = json.loads(file.read().decode("utf8")) - else: - new_content = json.load(file) - except Exception as err: - raise VerifyError("Invalid json file: %s" % err) + if sys.version_info.major == 3 and sys.version_info.minor < 6: + new_content = json.loads(file.read().decode("utf8")) + else: + new_content = json.load(file) if inner_path in self.contents: old_content = self.contents.get(inner_path, {"modified": 0}) # Checks if its newer the ours @@ -1008,16 +979,14 @@ class ContentManager(object): if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid raise VerifyError("Invalid cert!") - valid_signs = [] + valid_signs = 0 for address in valid_signers: if address in signs: - result = CryptBitcoin.verify(sign_content, address, signs[address]) - if result: - valid_signs.append(address) - if len(valid_signs) >= signs_required: + valid_signs += CryptBitcoin.verify(sign_content, address, signs[address]) + if valid_signs >= signs_required: break # Break if we has enough signs - if len(valid_signs) < signs_required: - raise VerifyError("Valid signs: %s/%s, Valid Signers : %s" % (len(valid_signs), signs_required, valid_signs)) + if valid_signs < signs_required: + raise VerifyError("Valid signs: %s/%s" % (valid_signs, signs_required)) else: return self.verifyContent(inner_path, new_content) else: # Old style signing diff --git a/src/Crypt/Crypt.py b/src/Crypt/Crypt.py deleted file mode 100644 index 7d7d3659..00000000 --- a/src/Crypt/Crypt.py +++ /dev/null @@ -1,4 +0,0 @@ -from Config import config -from util import ThreadPool - -thread_pool_crypt = ThreadPool.ThreadPool(config.threads_crypt) \ No newline at end of file diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py index 68b2caa2..a558e676 100644 --- a/src/Crypt/CryptBitcoin.py +++ b/src/Crypt/CryptBitcoin.py @@ -1,39 +1,25 @@ import logging import base64 -import binascii 
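
# Sketch of the multi-signature threshold check in the hunk above: each listed signer's
# signature is verified and collected until the required count is reached. verify_func
# stands in for the project's address/signature verifier (e.g. CryptBitcoin.verify).

def has_enough_signs(sign_content, signs, valid_signers, signs_required, verify_func):
    valid_signs = []
    for address in valid_signers:
        if address not in signs:
            continue
        if verify_func(sign_content, address, signs[address]):
            valid_signs.append(address)
        if len(valid_signs) >= signs_required:
            return True  # Enough valid signatures, no need to check the rest
    return len(valid_signs) >= signs_required
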
-import time -import hashlib -from util.Electrum import dbl_format +from util import OpensslFindPatch +from lib import pybitcointools as btctools from Config import config -import util.OpensslFindPatch +lib_verify_best = "btctools" -lib_verify_best = "sslcrypto" -from lib import sslcrypto -sslcurve_native = sslcrypto.ecc.get_curve("secp256k1") -sslcurve_fallback = sslcrypto.fallback.ecc.get_curve("secp256k1") -sslcurve = sslcurve_native - -def loadLib(lib_name, silent=False): - global sslcurve, libsecp256k1message, lib_verify_best +def loadLib(lib_name): + global bitcoin, libsecp256k1message, lib_verify_best if lib_name == "libsecp256k1": - s = time.time() from lib import libsecp256k1message - import coincurve lib_verify_best = "libsecp256k1" - if not silent: - logging.info( - "Libsecpk256k1 loaded: %s in %.3fs" % - (type(coincurve._libsecp256k1.lib).__name__, time.time() - s) - ) - elif lib_name == "sslcrypto": - sslcurve = sslcurve_native - if sslcurve_native == sslcurve_fallback: - logging.warning("SSLCurve fallback loaded instead of native") - elif lib_name == "sslcrypto_fallback": - sslcurve = sslcurve_fallback + logging.info("Libsecpk256k1 loaded") + elif lib_name == "openssl": + import bitcoin.signmessage + import bitcoin.core.key + import bitcoin.wallet + + logging.info("OpenSSL loaded, version: %.9X" % bitcoin.core.key._ssl.SSLeay()) try: if not config.use_libsecp256k1: @@ -41,30 +27,35 @@ try: loadLib("libsecp256k1") lib_verify_best = "libsecp256k1" except Exception as err: - logging.info("Libsecp256k1 load failed: %s" % err) + logging.info("Libsecp256k1 load failed: %s, try to load OpenSSL" % err) + try: + if not config.use_openssl: + raise Exception("Disabled by config") + loadLib("openssl") + lib_verify_best = "openssl" + except Exception as err: + logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err) -def newPrivatekey(): # Return new private key - return sslcurve.private_to_wif(sslcurve.new_private_key()).decode() +def newPrivatekey(uncompressed=True): # Return new private key + privatekey = btctools.encode_privkey(btctools.random_key(), "wif") + return privatekey def newSeed(): - return binascii.hexlify(sslcurve.new_private_key()).decode() + return btctools.random_key() def hdPrivatekey(seed, child): - # Too large child id could cause problems - privatekey_bin = sslcurve.derive_child(seed.encode(), child % 100000000) - return sslcurve.private_to_wif(privatekey_bin).decode() + masterkey = btctools.bip32_master_key(bytes(seed, "ascii")) + childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems + key = btctools.bip32_extract_key(childkey) + return btctools.encode_privkey(key, "wif") def privatekeyToAddress(privatekey): # Return address from private key try: - if len(privatekey) == 64: - privatekey_bin = bytes.fromhex(privatekey) - else: - privatekey_bin = sslcurve.wif_to_private(privatekey.encode()) - return sslcurve.private_to_address(privatekey_bin).decode() + return btctools.privkey_to_address(privatekey) except Exception: # Invalid privatekey return False @@ -72,12 +63,8 @@ def privatekeyToAddress(privatekey): # Return address from private key def sign(data, privatekey): # Return sign to data using private key if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported - return base64.b64encode(sslcurve.sign( - data.encode(), - sslcurve.wif_to_private(privatekey.encode()), - recoverable=True, - hash=dbl_format - )).decode() + sign = 
btctools.ecdsa_sign(data, privatekey) + return sign def verify(data, valid_address, sign, lib_verify=None): # Verify data using address and sign @@ -89,9 +76,17 @@ def verify(data, valid_address, sign, lib_verify=None): # Verify data using add if lib_verify == "libsecp256k1": sign_address = libsecp256k1message.recover_address(data.encode("utf8"), sign).decode("utf8") - elif lib_verify in ("sslcrypto", "sslcrypto_fallback"): - publickey = sslcurve.recover(base64.b64decode(sign), data.encode(), hash=dbl_format) - sign_address = sslcurve.public_to_address(publickey).decode() + elif lib_verify == "openssl": + sig = base64.b64decode(sign) + message = bitcoin.signmessage.BitcoinMessage(data) + hash = message.GetHash() + + pubkey = bitcoin.core.key.CPubKey.recover_compact(hash, sig) + + sign_address = str(bitcoin.wallet.P2PKHBitcoinAddress.from_pubkey(pubkey)) + elif lib_verify == "btctools": # Use pure-python + pub = btctools.ecdsa_recover(data, sign) + sign_address = btctools.pubtoaddr(pub) else: raise Exception("No library enabled for signature verification") diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py index c0903e84..6cedaeca 100644 --- a/src/Crypt/CryptConnection.py +++ b/src/Crypt/CryptConnection.py @@ -11,23 +11,13 @@ from util import helper class CryptConnectionManager: def __init__(self): - if config.openssl_bin_file: - self.openssl_bin = config.openssl_bin_file - elif sys.platform.startswith("win"): + # OpenSSL params + if sys.platform.startswith("win"): self.openssl_bin = "tools\\openssl\\openssl.exe" - elif config.dist_type.startswith("bundle_linux"): - self.openssl_bin = "../runtime/bin/openssl" else: self.openssl_bin = "openssl" - - self.context_client = None - self.context_server = None - - self.openssl_conf_template = "src/lib/openssl/openssl.cnf" - self.openssl_conf = config.data_dir + "/openssl.cnf" - self.openssl_env = { - "OPENSSL_CONF": self.openssl_conf, + "OPENSSL_CONF": "src/lib/openssl/openssl.cnf", "RANDFILE": config.data_dir + "/openssl-rand.tmp" } @@ -39,44 +29,6 @@ class CryptConnectionManager: self.cert_csr = config.data_dir + "/cert-rsa.csr" self.key_pem = config.data_dir + "/key-rsa.pem" - self.log = logging.getLogger("CryptConnectionManager") - self.log.debug("Version: %s" % ssl.OPENSSL_VERSION) - - self.fakedomains = [ - "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com", - "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com", - "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp", - "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com", - "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com", - "nazwa.pl", "symantec.com" - ] - - def createSslContexts(self): - if self.context_server and self.context_client: - return False - ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:" - ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" - - if hasattr(ssl, "PROTOCOL_TLS"): - protocol = ssl.PROTOCOL_TLS - else: - protocol = ssl.PROTOCOL_TLSv1_2 - self.context_client = ssl.SSLContext(protocol) - self.context_client.check_hostname = False - self.context_client.verify_mode = ssl.CERT_NONE - - self.context_server = ssl.SSLContext(protocol) - self.context_server.load_cert_chain(self.cert_pem, self.key_pem) - - for ctx in (self.context_client, self.context_server): - ctx.set_ciphers(ciphers) - ctx.options |= 
ssl.OP_NO_COMPRESSION - try: - ctx.set_alpn_protocols(["h2", "http/1.1"]) - ctx.set_npn_protocols(["h2", "http/1.1"]) - except Exception: - pass - # Select crypt that supported by both sides # Return: Name of the crypto def selectCrypt(self, client_supported): @@ -89,14 +41,18 @@ class CryptConnectionManager: # Return: wrapped socket def wrapSocket(self, sock, crypt, server=False, cert_pin=None): if crypt == "tls-rsa": + ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:" + ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" if server: - sock_wrapped = self.context_server.wrap_socket(sock, server_side=True) + sock_wrapped = ssl.wrap_socket( + sock, server_side=server, keyfile=self.key_pem, + certfile=self.cert_pem, ciphers=ciphers + ) else: - sock_wrapped = self.context_client.wrap_socket(sock, server_hostname=random.choice(self.fakedomains)) + sock_wrapped = ssl.wrap_socket(sock, ciphers=ciphers) if cert_pin: cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest() - if cert_hash != cert_pin: - raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin)) + assert cert_hash == cert_pin, "Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin) return sock_wrapped else: return sock @@ -126,46 +82,42 @@ class CryptConnectionManager: "/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA", "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA" ] - self.openssl_env['CN'] = random.choice(self.fakedomains) - environ = os.environ - environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF'] - environ['RANDFILE'] = self.openssl_env['RANDFILE'] - environ['CN'] = self.openssl_env['CN'] + fakedomains = [ + "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com", + "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com", + "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp", + "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com", + "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com", + "nazwa.pl", "symantec.com" + ] + self.openssl_env['CN'] = random.choice(fakedomains) if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): - self.createSslContexts() return True # Files already exits import subprocess - # Replace variables in config template - conf_template = open(self.openssl_conf_template).read() - conf_template = conf_template.replace("$ENV::CN", self.openssl_env['CN']) - open(self.openssl_conf, "w").write(conf_template) - # Generate CAcert and CAkey cmd_params = helper.shellquote( self.openssl_bin, - self.openssl_conf, + self.openssl_env["OPENSSL_CONF"], random.choice(casubjects), self.cakey_pem, self.cacert_pem ) cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -config %s -subj %s -keyout %s -out %s -batch" % cmd_params - self.log.debug("Generating RSA CAcert and CAkey PEM files...") - self.log.debug("Running: %s" % cmd) + logging.debug("Generating RSA CAcert and CAkey PEM files...") proc = subprocess.Popen( cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ + stdout=subprocess.PIPE, env=self.openssl_env ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") + back = proc.stdout.read().strip().decode().replace("\r", "") proc.wait() + logging.debug("%s\n%s" % 
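
# Sketch of the SSLContext-based wrapping in the removed CryptConnection lines above:
# one client context and one server context are built once and reused for every
# connection, and a peer can additionally be pinned by the SHA-256 of its DER cert.
# The cert/key paths are placeholders.
import ssl
import hashlib

def create_contexts(cert_pem="cert-rsa.pem", key_pem="key-rsa.pem"):
    protocol = ssl.PROTOCOL_TLS if hasattr(ssl, "PROTOCOL_TLS") else ssl.PROTOCOL_TLSv1_2
    context_client = ssl.SSLContext(protocol)
    context_client.check_hostname = False
    context_client.verify_mode = ssl.CERT_NONE  # Peers use self-signed certificates
    context_server = ssl.SSLContext(protocol)
    context_server.load_cert_chain(cert_pem, key_pem)
    return context_client, context_server

def wrap_pinned(context_client, sock, server_hostname, cert_pin=None):
    wrapped = context_client.wrap_socket(sock, server_hostname=server_hostname)
    if cert_pin:
        cert_hash = hashlib.sha256(wrapped.getpeercert(True)).hexdigest()
        if cert_hash != cert_pin:
            raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin))
    return wrapped
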
(cmd, back)) if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)): - self.log.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist. (%s)" % back) + logging.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist.") return False - else: - self.log.debug("Result: %s" % back) # Generate certificate key and signing request cmd_params = helper.shellquote( @@ -173,17 +125,17 @@ class CryptConnectionManager: self.key_pem, self.cert_csr, "/CN=" + self.openssl_env['CN'], - self.openssl_conf, + self.openssl_env["OPENSSL_CONF"], ) cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % cmd_params - self.log.debug("Generating certificate key and signing request...") + logging.debug("Generating certificate key and signing request...") proc = subprocess.Popen( cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ + stdout=subprocess.PIPE, env=self.openssl_env ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") + back = proc.stdout.read().strip().decode().replace("\r", "") proc.wait() - self.log.debug("Running: %s\n%s" % (cmd, back)) + logging.debug("%s\n%s" % (cmd, back)) # Sign request and generate certificate cmd_params = helper.shellquote( @@ -192,30 +144,21 @@ class CryptConnectionManager: self.cacert_pem, self.cakey_pem, self.cert_pem, - self.openssl_conf + self.openssl_env["OPENSSL_CONF"] ) cmd = "%s x509 -req -in %s -CA %s -CAkey %s -set_serial 01 -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % cmd_params - self.log.debug("Generating RSA cert...") + logging.debug("Generating RSA cert...") proc = subprocess.Popen( cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ + stdout=subprocess.PIPE, env=self.openssl_env ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") + back = proc.stdout.read().strip().decode().replace("\r", "") proc.wait() - self.log.debug("Running: %s\n%s" % (cmd, back)) + logging.debug("%s\n%s" % (cmd, back)) if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): - self.createSslContexts() - - # Remove no longer necessary files - os.unlink(self.openssl_conf) - os.unlink(self.cacert_pem) - os.unlink(self.cakey_pem) - os.unlink(self.cert_csr) - return True else: - self.log.error("RSA ECC SSL cert generation failed, cert or key files not exist.") - + logging.error("RSA ECC SSL cert generation failed, cert or key files not exist.") manager = CryptConnectionManager() diff --git a/src/Crypt/CryptTor.py b/src/Crypt/CryptRsa.py similarity index 51% rename from src/Crypt/CryptTor.py rename to src/Crypt/CryptRsa.py index 78ba6fc2..494c4d24 100644 --- a/src/Crypt/CryptTor.py +++ b/src/Crypt/CryptRsa.py @@ -4,17 +4,7 @@ import hashlib def sign(data, privatekey): import rsa from rsa import pkcs1 - from lib import Ed25519 - ## Onion Service V3 - if len(privatekey) == 88: - prv_key = base64.b64decode(privatekey) - pub_key = Ed25519.publickey_unsafe(prv_key) - sign = Ed25519.signature_unsafe(data, prv_key, pub_key) - - return sign - - ## Onion Service V2 if "BEGIN RSA PRIVATE KEY" not in privatekey: privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey @@ -25,61 +15,24 @@ def sign(data, privatekey): def verify(data, publickey, sign): import rsa from rsa import pkcs1 - from lib import Ed25519 - ## Onion Service V3 - if len(publickey) == 32: - - try: - valid = Ed25519.checkvalid(sign, data, publickey) - valid = 
'SHA-256' - - except Exception as err: - print(err) - valid = False - - return valid - - ## Onion Service V2 pub = rsa.PublicKey.load_pkcs1(publickey, format="DER") - try: valid = rsa.pkcs1.verify(data, sign, pub) - except pkcs1.VerificationError: valid = False - return valid def privatekeyToPublickey(privatekey): import rsa from rsa import pkcs1 - from lib import Ed25519 - ## Onion Service V3 - if len(privatekey) == 88: - prv_key = base64.b64decode(privatekey) - pub_key = Ed25519.publickey_unsafe(prv_key) - - return pub_key - - ## Onion Service V2 if "BEGIN RSA PRIVATE KEY" not in privatekey: privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey priv = rsa.PrivateKey.load_pkcs1(privatekey) pub = rsa.PublicKey(priv.n, priv.e) - return pub.save_pkcs1("DER") def publickeyToOnion(publickey): - from lib import Ed25519 - - ## Onion Service V3 - if len(publickey) == 32: - addr = Ed25519.publickey_to_onionaddress(publickey)[:-6] - - return addr - - ## Onion Service V2 return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii") diff --git a/src/Db/Db.py b/src/Db/Db.py index d1d9ce15..4202e1c1 100644 --- a/src/Db/Db.py +++ b/src/Db/Db.py @@ -5,10 +5,7 @@ import logging import re import os import atexit -import threading import sys -import weakref -import errno import gevent @@ -16,12 +13,7 @@ from Debug import Debug from .DbCursor import DbCursor from util import SafeRe from util import helper -from util import ThreadPool -from Config import config -thread_pool_db = ThreadPool.ThreadPool(config.threads_db) - -next_db_id = 0 opened_dbs = [] @@ -32,7 +24,7 @@ def dbCleanup(): for db in opened_dbs[:]: idle = time.time() - db.last_query_time if idle > 60 * 5 and db.close_idle: - db.close("Cleanup") + db.close() def dbCommitCheck(): @@ -50,36 +42,24 @@ def dbCommitCheck(): def dbCloseAll(): for db in opened_dbs[:]: - db.close("Close all") - + db.close() gevent.spawn(dbCleanup) gevent.spawn(dbCommitCheck) atexit.register(dbCloseAll) -class DbTableError(Exception): - def __init__(self, message, table): - super().__init__(message) - self.table = table - - class Db(object): def __init__(self, schema, db_path, close_idle=False): - global next_db_id self.db_path = db_path self.db_dir = os.path.dirname(db_path) + "/" self.schema = schema self.schema["version"] = self.schema.get("version", 1) self.conn = None self.cur = None - self.cursors = weakref.WeakSet() - self.id = next_db_id - next_db_id += 1 self.progress_sleeping = False - self.commiting = False - self.log = logging.getLogger("Db#%s:%s" % (self.id, schema["db_name"])) + self.log = logging.getLogger("Db:%s" % schema["db_name"]) self.table_names = None self.collect_stats = False self.foreign_keys = False @@ -92,51 +72,27 @@ class Db(object): self.last_query_time = time.time() self.last_sleep_time = time.time() self.num_execute_since_sleep = 0 - self.lock = ThreadPool.Lock() - self.connect_lock = ThreadPool.Lock() def __repr__(self): return "" % (id(self), self.db_path, self.close_idle) def connect(self): - self.connect_lock.acquire(True) - try: - if self.conn: - self.log.debug("Already connected, connection ignored") - return - - if self not in opened_dbs: - opened_dbs.append(self) - s = time.time() - try: # Directory not exist yet - os.makedirs(self.db_dir) - self.log.debug("Created Db path: %s" % self.db_dir) - except OSError as err: - if err.errno != errno.EEXIST: - raise err - if not os.path.isfile(self.db_path): - self.log.debug("Db file not exist yet: %s" % self.db_path) - 
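
# Sketch of the v2 .onion address derivation kept in publickeyToOnion() above: the
# address is the base32 encoding of the first 10 bytes of the SHA-1 of the DER-encoded
# RSA public key (the removed lines handled v3/Ed25519 keys separately).
import base64
import hashlib

def publickey_to_onion_v2(publickey_der):
    return base64.b32encode(hashlib.sha1(publickey_der).digest()[:10]).lower().decode("ascii")

# publickey_to_onion_v2(der_bytes) + ".onion" gives the v2 hidden service hostname.
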
self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED", check_same_thread=False) - self.conn.row_factory = sqlite3.Row - self.conn.set_progress_handler(self.progress, 5000000) - self.conn.execute('PRAGMA journal_mode=WAL') - if self.foreign_keys: - self.conn.execute("PRAGMA foreign_keys = ON") - self.cur = self.getCursor() - - self.log.debug( - "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." % - (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) - ) - self.log.debug("Connect by thread: %s" % threading.current_thread().ident) - self.log.debug("Connect called by %s" % Debug.formatStack()) - finally: - self.connect_lock.release() - - def getConn(self): - if not self.conn: - self.connect() - return self.conn + if self not in opened_dbs: + opened_dbs.append(self) + s = time.time() + if not os.path.isdir(self.db_dir): # Directory not exist yet + os.makedirs(self.db_dir) + self.log.debug("Created Db path: %s" % self.db_dir) + if not os.path.isfile(self.db_path): + self.log.debug("Db file not exist yet: %s" % self.db_path) + self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED") + self.conn.row_factory = sqlite3.Row + self.conn.set_progress_handler(self.progress, 5000000) + self.cur = self.getCursor() + self.log.debug( + "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." % + (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) + ) def progress(self, *args, **kwargs): self.progress_sleeping = True @@ -149,34 +105,19 @@ class Db(object): self.connect() return self.cur.execute(query, params) - @thread_pool_db.wrap def commit(self, reason="Unknown"): if self.progress_sleeping: self.log.debug("Commit ignored: Progress sleeping") return False - if not self.conn: - self.log.debug("Commit ignored: No connection") - return False - - if self.commiting: - self.log.debug("Commit ignored: Already commiting") - return False - try: s = time.time() - self.commiting = True self.conn.commit() self.log.debug("Commited in %.3fs (reason: %s)" % (time.time() - s, reason)) return True except Exception as err: - if "SQL statements in progress" in str(err): - self.log.warning("Commit delayed: %s (reason: %s)" % (Debug.formatException(err), reason)) - else: - self.log.error("Commit error: %s (reason: %s)" % (Debug.formatException(err), reason)) + self.log.error("Commit error: %s" % err) return False - finally: - self.commiting = False def insertOrUpdate(self, *args, **kwargs): if not self.conn: @@ -213,36 +154,21 @@ class Db(object): self.delayed_queue = [] self.delayed_queue_thread = None - def close(self, reason="Unknown"): - if not self.conn: - return False - self.connect_lock.acquire() + def close(self): s = time.time() if self.delayed_queue: self.processDelayed() if self in opened_dbs: opened_dbs.remove(self) self.need_commit = False - self.commit("Closing: %s" % reason) - self.log.debug("Close called by %s" % Debug.formatStack()) - for i in range(5): - if len(self.cursors) == 0: - break - self.log.debug("Pending cursors: %s" % len(self.cursors)) - time.sleep(0.1 * i) - if len(self.cursors): - self.log.debug("Killing cursors: %s" % len(self.cursors)) - self.conn.interrupt() - + self.commit("Closing") if self.cur: self.cur.close() if self.conn: - ThreadPool.main_loop.call(self.conn.close) + self.conn.close() self.conn = None self.cur = None - self.log.debug("%s closed (reason: %s) in %.3fs, opened: %s" % (self.db_path, reason, time.time() - s, len(opened_dbs))) - self.connect_lock.release() - return True + self.log.debug("%s closed in 
%.3fs, opened: %s" % (self.db_path, time.time() - s, len(opened_dbs))) # Gets a cursor object to database # Return: Cursor class @@ -250,7 +176,11 @@ class Db(object): if not self.conn: self.connect() - cur = DbCursor(self) + cur = DbCursor(self.conn, self) + cur.execute('PRAGMA journal_mode=WAL') + if self.foreign_keys: + cur.execute("PRAGMA foreign_keys = ON") + return cur def getSharedCursor(self): @@ -326,17 +256,15 @@ class Db(object): # Check schema tables for table_name, table_settings in self.schema.get("tables", {}).items(): try: - indexes = table_settings.get("indexes", []) - version = table_settings.get("schema_changed", 0) changed = cur.needTable( table_name, table_settings["cols"], - indexes, version=version + table_settings.get("indexes", []), version=table_settings.get("schema_changed", 0) ) if changed: changed_tables.append(table_name) except Exception as err: self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err))) - raise DbTableError(err, table_name) + return False self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables)) if changed_tables: diff --git a/src/Db/DbCursor.py b/src/Db/DbCursor.py index acb8846d..274782ec 100644 --- a/src/Db/DbCursor.py +++ b/src/Db/DbCursor.py @@ -1,14 +1,18 @@ import time import re +import gevent from util import helper + # Special sqlite cursor class DbCursor: - def __init__(self, db): + def __init__(self, conn, db): + self.conn = conn self.db = db + self.cursor = conn.cursor() self.logging = False def quoteValue(self, value): @@ -45,8 +49,6 @@ class DbCursor: else: if key.startswith("not__"): query_wheres.append(key.replace("not__", "") + " != ?") - elif key.endswith("__like"): - query_wheres.append(key.replace("__like", "") + " LIKE ?") elif key.endswith(">"): query_wheres.append(key.replace(">", "") + " > ?") elif key.endswith("<"): @@ -83,37 +85,26 @@ class DbCursor: return query, params def execute(self, query, params=None): + if query.upper().strip("; ") == "VACUUM": + self.db.commit("vacuum called") query = query.strip() - while self.db.progress_sleeping or self.db.commiting: + while self.db.progress_sleeping: time.sleep(0.1) self.db.last_query_time = time.time() query, params = self.parseQuery(query, params) - cursor = self.db.getConn().cursor() - self.db.cursors.add(cursor) - if self.db.lock.locked(): - self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock)) + s = time.time() - try: - s = time.time() - self.db.lock.acquire(True) - if query.upper().strip("; ") == "VACUUM": - self.db.commit("vacuum called") - if params: - res = cursor.execute(query, params) - else: - res = cursor.execute(query) - finally: - self.db.lock.release() - - taken_query = time.time() - s - if self.logging or taken_query > 1: - if params: # Query has parameters - self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s)) - else: - self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s)) + if params: # Query has parameters + res = self.cursor.execute(query, params) + if self.logging: + self.db.log.debug(query + " " + str(params) + " (Done in %.4f)" % (time.time() - s)) + else: + res = self.cursor.execute(query) + if self.logging: + self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s)) # Log query stats if self.db.collect_stats: @@ -122,39 +113,12 @@ class DbCursor: self.db.query_stats[query]["call"] += 1 self.db.query_stats[query]["time"] += time.time() - s - query_type = query.split(" ", 
1)[0].upper() - is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"] - if not self.db.need_commit and is_update_query: - self.db.need_commit = True + if not self.db.need_commit: + query_type = query.split(" ", 1)[0].upper() + if query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"]: + self.db.need_commit = True - if is_update_query: - return cursor - else: - return res - - def executemany(self, query, params): - while self.db.progress_sleeping or self.db.commiting: - time.sleep(0.1) - - self.db.last_query_time = time.time() - - s = time.time() - cursor = self.db.getConn().cursor() - self.db.cursors.add(cursor) - - try: - self.db.lock.acquire(True) - cursor.executemany(query, params) - finally: - self.db.lock.release() - - taken_query = time.time() - s - if self.logging or taken_query > 0.1: - self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query)) - - self.db.need_commit = True - - return cursor + return res # Creates on updates a database row without incrementing the rowid def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}): @@ -163,11 +127,11 @@ class DbCursor: params = query_sets params.update(query_wheres) - res = self.execute( + self.execute( "UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)), params ) - if res.rowcount == 0: + if self.cursor.rowcount == 0: params.update(oninsert) # Add insert-only fields self.execute("INSERT INTO %s ?" % table, params) @@ -197,7 +161,7 @@ class DbCursor: def needTable(self, table, cols, indexes=None, version=1): current_version = self.db.getTableVersion(table) if int(current_version) < int(version): # Table need update or not extis - self.db.log.debug("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) + self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." 
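
# Sketch of the insertOrUpdate pattern above using plain sqlite3: try an UPDATE first
# and fall back to INSERT when no row matched, so existing rowids are preserved. The
# table and column names below are examples, not the real ContentDb schema.
import sqlite3

def insert_or_update(conn, site_id, inner_path, size):
    cur = conn.execute(
        "UPDATE file SET size = ? WHERE site_id = ? AND inner_path = ?",
        (size, site_id, inner_path)
    )
    if cur.rowcount == 0:  # Nothing updated: the row does not exist yet
        conn.execute(
            "INSERT INTO file (site_id, inner_path, size) VALUES (?, ?, ?)",
            (site_id, inner_path, size)
        )

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE file (site_id INTEGER, inner_path TEXT, size INTEGER)")
insert_or_update(conn, 1, "content.json", 1234)
insert_or_update(conn, 1, "content.json", 2345)  # Updates the existing row in place
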
% (table, current_version, version)) self.createTable(table, cols) if indexes: self.createIndexes(table, indexes) @@ -243,4 +207,4 @@ class DbCursor: return row def close(self): - pass + self.cursor.close() diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py index 0ec42615..4c4099f7 100644 --- a/src/Debug/Debug.py +++ b/src/Debug/Debug.py @@ -6,135 +6,55 @@ from Config import config # Non fatal exception class Notify(Exception): - def __init__(self, message=None): - if message: - self.message = message + def __init__(self, message): + self.message = message def __str__(self): return self.message -# Gevent greenlet.kill accept Exception type -def createNotifyType(message): - return type("Notify", (Notify, ), {"message": message}) - - def formatExceptionMessage(err): err_type = err.__class__.__name__ - if err.args: - err_message = err.args[-1] - else: - err_message = err.__str__() + err_message = str(err.args[-1]) return "%s: %s" % (err_type, err_message) -python_lib_dirs = [path.replace("\\", "/") for path in sys.path if re.sub(r".*[\\/]", "", path) in ("site-packages", "dist-packages")] -python_lib_dirs.append(os.path.dirname(os.__file__).replace("\\", "/")) # TODO: check if returns the correct path for PyPy - -root_dir = os.path.realpath(os.path.dirname(__file__) + "/../../") -root_dir = root_dir.replace("\\", "/") - - -def formatTraceback(items, limit=None, fold_builtin=True): - back = [] - i = 0 - prev_file_title = "" - is_prev_builtin = False - - for path, line in items: - i += 1 - is_last = i == len(items) - path = path.replace("\\", "/") - - if path.startswith("src/gevent/"): - file_title = "/" + path[len("src/gevent/"):] - is_builtin = True - is_skippable_builtin = False - elif path in ("", ""): - file_title = "(importlib)" - is_builtin = True - is_skippable_builtin = True - else: - is_skippable_builtin = False - for base in python_lib_dirs: - if path.startswith(base + "/"): - file_title = path[len(base + "/"):] - module_name, *tail = file_title.split("/") - if module_name.endswith(".py"): - module_name = module_name[:-3] - file_title = "/".join(["<%s>" % module_name] + tail) - is_builtin = True - break - else: - is_builtin = False - for base in (root_dir + "/src", root_dir + "/plugins", root_dir): - if path.startswith(base + "/"): - file_title = path[len(base + "/"):] - break - else: - # For unknown paths, do our best to hide absolute path - file_title = path - for needle in ("/zeronet/", "/core/"): - if needle in file_title.lower(): - file_title = "?/" + file_title[file_title.lower().rindex(needle) + len(needle):] - - # Path compression: A/AB/ABC/X/Y.py -> ABC/X/Y.py - # E.g.: in 'Db/DbCursor.py' the directory part is unnecessary - if not file_title.startswith("/"): - prev_part = "" - for i, part in enumerate(file_title.split("/") + [""]): - if not part.startswith(prev_part): - break - prev_part = part - file_title = "/".join(file_title.split("/")[i - 1:]) - - if is_skippable_builtin and fold_builtin: - pass - elif is_builtin and is_prev_builtin and not is_last and fold_builtin: - if back[-1] != "...": - back.append("...") - else: - if file_title == prev_file_title: - back.append("%s" % line) - else: - back.append("%s line %s" % (file_title, line)) - - prev_file_title = file_title - is_prev_builtin = is_builtin - - if limit and i >= limit: - back.append("...") - break - return back - - def formatException(err=None, format="text"): import traceback if type(err) == Notify: return err - elif type(err) == tuple and err and err[0] is not None: # Passed trackeback info + elif type(err) 
== tuple and err[0] is not None: # Passed trackeback info exc_type, exc_obj, exc_tb = err err = None else: # No trackeback info passed, get latest exc_type, exc_obj, exc_tb = sys.exc_info() if not err: - if hasattr(err, "message"): - err = exc_obj.message + err = exc_obj.message + tb = [] + for frame in traceback.extract_tb(exc_tb): + path, line, function, text = frame + dir_name, file_name = os.path.split(path.replace("\\", "/")) + plugin_match = re.match(".*/plugins/(.+)$", dir_name) + if plugin_match: + file_title = "%s/%s" % (plugin_match.group(1), file_name) else: - err = exc_obj - - tb = formatTraceback([[frame[0], frame[1]] for frame in traceback.extract_tb(exc_tb)]) + file_title = file_name + tb.append("%s line %s" % (file_title, line)) if format == "html": - return "%s: %s
    %s" % (repr(err), err, " > ".join(tb)) + return "%s: %s
    %s" % (exc_type.__name__, err, " > ".join(tb)) else: return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb)) -def formatStack(limit=None): +def formatStack(): import inspect - tb = formatTraceback([[frame[1], frame[2]] for frame in inspect.stack()[1:]], limit=limit) - return " > ".join(tb) + back = [] + for stack in inspect.stack(): + frame, path, line, function, source, index = stack + file = os.path.split(path)[1] + back.append("%s line %s" % (file, line)) + return " > ".join(back) # Test if gevent eventloop blocks @@ -143,21 +63,14 @@ import gevent import time -num_block = 0 - - def testBlock(): - global num_block logging.debug("Gevent block checker started") last_time = time.time() while 1: time.sleep(1) if time.time() - last_time > 1.1: - logging.debug("Gevent block detected: %.3fs" % (time.time() - last_time - 1)) - num_block += 1 + logging.debug("Gevent block detected: %s" % (time.time() - last_time - 1)) last_time = time.time() - - gevent.spawn(testBlock) diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py index d100a3b8..5ae49121 100644 --- a/src/Debug/DebugHook.py +++ b/src/Debug/DebugHook.py @@ -50,7 +50,7 @@ def handleErrorNotify(*args, **kwargs): if err.__name__ == "KeyboardInterrupt": shutdown("Keyboard interrupt") elif err.__name__ != "Notify": - logging.error("Unhandled exception: %s" % Debug.formatException(args)) + logging.error("Unhandled exception: %s" % [args]) sys.__excepthook__(*args, **kwargs) @@ -73,20 +73,15 @@ else: gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet importlib.reload(gevent) -def handleGreenletError(context, type, value, tb): - if context.__class__ is tuple and context[0].__class__.__name__ == "ThreadPool": - # Exceptions in ThreadPool will be handled in the main Thread - return None - +def handleGreenletError(self, context, type, value, tb): if isinstance(value, str): # Cython can raise errors where the value is a plain string # e.g., AttributeError, "_semaphore.Semaphore has no attr", value = type(value) - - if not issubclass(type, gevent.get_hub().NOT_ERROR): + if not issubclass(type, self.NOT_ERROR): sys.excepthook(type, value, tb) -gevent.get_hub().handle_error = handleGreenletError +gevent.hub.Hub.handle_error = handleGreenletError try: signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM")) diff --git a/src/Debug/DebugLock.py b/src/Debug/DebugLock.py deleted file mode 100644 index 9cf22520..00000000 --- a/src/Debug/DebugLock.py +++ /dev/null @@ -1,24 +0,0 @@ -import time -import logging - -import gevent.lock - -from Debug import Debug - - -class DebugLock: - def __init__(self, log_after=0.01, name="Lock"): - self.name = name - self.log_after = log_after - self.lock = gevent.lock.Semaphore(1) - self.release = self.lock.release - - def acquire(self, *args, **kwargs): - s = time.time() - res = self.lock.acquire(*args, **kwargs) - time_taken = time.time() - s - if time_taken >= self.log_after: - logging.debug("%s: Waited %.3fs after called by %s" % - (self.name, time_taken, Debug.formatStack()) - ) - return res diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py index a892dc56..a24203b9 100644 --- a/src/Debug/DebugMedia.py +++ b/src/Debug/DebugMedia.py @@ -45,7 +45,6 @@ def findCoffeescriptCompiler(): # Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features def merge(merged_path): - merged_path = merged_path.replace("\\", "/") merge_dir = os.path.dirname(merged_path) s = time.time() ext = merged_path.split(".")[-1] @@ -78,10 +77,9 @@ 
def merge(merged_path): parts = [] s_total = time.time() for file_path in findfiles(merge_dir, find_ext): - file_relative_path = file_path.replace(merge_dir + "/", "") - parts.append(b"\n/* ---- %s ---- */\n\n" % file_relative_path.encode("utf8")) + parts.append(b"\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "").encode("utf8")) if file_path.endswith(".coffee"): # Compile coffee script - if file_path in changed or file_relative_path not in old_parts: # Only recompile if changed or its not compiled before + if file_path in changed or file_path.replace(config.data_dir, "") not in old_parts: # Only recompile if changed or its not compiled before if config.coffeescript_compiler is None: config.coffeescript_compiler = findCoffeescriptCompiler() if not config.coffeescript_compiler: @@ -92,7 +90,7 @@ def merge(merged_path): file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep)) if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file - command = config.coffeescript_compiler.replace("%s", file_path_escaped) + command = config.coffeescript_compiler % file_path_escaped else: # Put coffeescript file to end command = config.coffeescript_compiler + " " + file_path_escaped @@ -108,14 +106,14 @@ def merge(merged_path): parts.append(out) else: # Put error message in place of source code error = out - logging.error("%s Compile error: %s" % (file_relative_path, error)) + logging.error("%s Compile error: %s" % (file_path, error)) error_escaped = re.escape(error).replace(b"\n", b"\\n").replace(br"\\n", br"\n") parts.append( b"alert('%s compile error: %s');" % - (file_relative_path.encode(), error_escaped) + (file_path.encode(), error_escaped) ) else: # Not changed use the old_part - parts.append(old_parts[file_relative_path]) + parts.append(old_parts[file_path.replace(config.data_dir, "")]) else: # Add to parts parts.append(open(file_path, "rb").read()) diff --git a/src/Debug/DebugReloader.py b/src/Debug/DebugReloader.py index 482c7921..f49dc5e9 100644 --- a/src/Debug/DebugReloader.py +++ b/src/Debug/DebugReloader.py @@ -1,6 +1,5 @@ import logging import time -import os from Config import config @@ -19,9 +18,7 @@ else: class DebugReloader: - def __init__(self, paths=None): - if not paths: - paths = ["src", "plugins", config.data_dir + "/__plugins__"] + def __init__(self, paths=["src", "plugins"]): self.log = logging.getLogger("DebugReloader") self.last_chaged = 0 self.callbacks = [] @@ -31,8 +28,6 @@ class DebugReloader: event_handler.on_modified = event_handler.on_deleted = self.onChanged event_handler.on_created = event_handler.on_moved = self.onChanged for path in paths: - if not os.path.isdir(path): - continue self.log.debug("Adding autoreload: %s" % path) self.observer.schedule(event_handler, path, recursive=True) self.observer.start() @@ -46,14 +41,7 @@ class DebugReloader: if ext not in ["py", "json"] or "Test" in path or time.time() - self.last_chaged < 1.0: return False self.last_chaged = time.time() - if os.path.isfile(path): - time_modified = os.path.getmtime(path) - else: - time_modified = 0 - self.log.debug("File changed: %s reloading source code (modified %.3fs ago)" % (evt, time.time() - time_modified)) - if time.time() - time_modified > 5: # Probably it's just an attribute change, ignore it - return False - + self.log.debug("File changed: %s reloading source code" % path) time.sleep(0.1) # Wait for lock release for callback in self.callbacks: try: diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index c082c378..0846a714 
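# --- Editor's note: illustrative sketch only, not part of the patch above or below. ---
# The DebugReloader hunks above keep the same watchdog wiring: every change event is
# routed to a single callback and each watched path gets a recursive observer.
# A minimal standalone version of that pattern (the helper name is hypothetical):
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

def watch_paths(paths, callback):
    observer = Observer()
    handler = FileSystemEventHandler()
    # Route modify/delete/create/move to the same callback, as DebugReloader does
    handler.on_modified = handler.on_deleted = callback
    handler.on_created = handler.on_moved = callback
    for path in paths:
        observer.schedule(handler, path, recursive=True)
    observer.start()
    return observer

# Example: watch_paths(["src", "plugins"], lambda evt: print("changed:", evt.src_path))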
100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -109,38 +109,34 @@ class FileRequest(object): return False inner_path = params.get("inner_path", "") + current_content_modified = site.content_manager.contents.get(inner_path, {}).get("modified", 0) + body = params["body"] + if not inner_path.endswith("content.json"): self.response({"error": "Only content.json update allowed"}) self.connection.badAction(5) return - current_content_modified = site.content_manager.contents.get(inner_path, {}).get("modified", 0) should_validate_content = True if "modified" in params and params["modified"] <= current_content_modified: should_validate_content = False valid = None # Same or earlier content as we have - - body = params["body"] - if not body: # No body sent, we have to download it first - site.log.debug("Missing body from update for file %s, downloading ..." % inner_path) + elif not body: # No body sent, we have to download it first + self.log.debug("Missing body from update, downloading...") peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer try: body = peer.getFile(site.address, inner_path).read() except Exception as err: - site.log.debug("Can't download updated file %s: %s" % (inner_path, err)) - self.response({"error": "Invalid File update: Failed to download updated file content"}) + self.log.debug("Can't download updated file %s: %s" % (inner_path, err)) + self.response({"error": "File invalid update: Can't download updaed file"}) self.connection.badAction(5) return if should_validate_content: try: - if type(body) is str: - body = body.encode() - # elif type(body) is list: - # content = json.loads(bytes(list).decode()) content = json.loads(body.decode()) except Exception as err: - site.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) + self.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) self.response({"error": "File invalid JSON"}) self.connection.badAction(5) return @@ -153,7 +149,7 @@ class FileRequest(object): try: valid = site.content_manager.verifyFile(inner_path, content) except Exception as err: - site.log.debug("Update for %s is invalid: %s" % (inner_path, err)) + self.log.debug("Update for %s is invalid: %s" % (inner_path, err)) error = err valid = False @@ -165,19 +161,21 @@ class FileRequest(object): site.onFileDone(inner_path) # Trigger filedone - # Download every changed file from peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer - # On complete publish to other peers - diffs = params.get("diffs", {}) - site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=6), "publish_%s" % inner_path) + if inner_path.endswith("content.json"): # Download every changed file from peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer + # On complete publish to other peers + diffs = params.get("diffs", {}) + site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=3), "publish_%s" % inner_path) - # Load new content file and download changed files in new thread - def downloader(): - site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {})) + # Load new content file and download changed files in new thread + def downloader(): + site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {})) + del self.server.files_parsing[file_uri] + + gevent.spawn(downloader) 
+ else: del self.server.files_parsing[file_uri] - gevent.spawn(downloader) - self.response({"ok": "Thanks, file %s updated!" % inner_path}) self.connection.goodAction() @@ -189,7 +187,7 @@ class FileRequest(object): if inner_path in site.content_manager.contents: peer.last_content_json_update = site.content_manager.contents[inner_path]["modified"] if config.verbose: - site.log.debug( + self.log.debug( "Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) ) @@ -271,7 +269,7 @@ class FileRequest(object): return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]} except RequestError as err: - self.log.debug("GetFile %s %s %s request error: %s" % (self.connection, params["site"], params["inner_path"], Debug.formatException(err))) + self.log.debug("GetFile %s %s request error: %s" % (self.connection, params["inner_path"], Debug.formatException(err))) self.response({"error": "File read error: %s" % err}) except OSError as err: if config.verbose: diff --git a/src/File/FileServer.py b/src/File/FileServer.py index b7a942fc..5f1d9b47 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -2,7 +2,6 @@ import logging import time import random import socket -import sys import gevent import gevent.pool @@ -49,7 +48,6 @@ class FileServer(ConnectionServer): raise Exception("Can't find bindable port") if not config.tor == "always": config.saveValue("fileserver_port", port) # Save random port value for next restart - config.arguments.fileserver_port = port ConnectionServer.__init__(self, ip, port, self.handleRequest) self.log.debug("Supported IP types: %s" % self.supported_ip_types) @@ -66,18 +64,12 @@ class FileServer(ConnectionServer): self.port_opened = {} - self.sites = self.site_manager.sites + self.sites = {} self.last_request = time.time() self.files_parsing = {} self.ui_server = None def getRandomPort(self, ip, port_range_from, port_range_to): - """Generates Random Port from given range - Args: - ip: IP Address - port_range_from: From Range - port_range_to: to Range - """ self.log.info("Getting random port in range %s-%s..." 
% (port_range_from, port_range_to)) tried = [] for bind_retry in range(100): @@ -116,7 +108,7 @@ class FileServer(ConnectionServer): self.log.debug("IPv6 supported on IP %s" % local_ipv6) return True except socket.error as err: - self.log.warning("IPv6 not supported: %s" % err) + self.log.error("IPv6 not supported: %s" % err) return False except Exception as err: self.log.error("IPv6 check error: %s" % err) @@ -295,10 +287,8 @@ class FileServer(ConnectionServer): with gevent.Timeout(10, exception=False): site.announcer.announcePex() - # Last check modification failed - if site.content_updated is False: - site.update() - elif site.bad_files: + # Retry failed files + if site.bad_files: site.retryBadFiles() if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7: @@ -318,8 +308,7 @@ class FileServer(ConnectionServer): def announceSite(self, site): site.announce(mode="update", pex=False) active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60 - if site.settings["own"] or active_site: - # Check connections more frequently on own and active sites to speed-up first connections + if site.settings["own"] or active_site: # Check connections more frequently on own and active sites to speed-up first connections site.needConnections(check_site_on_reconnect=True) site.sendMyHashfield(3) site.updateHashfield(3) @@ -337,49 +326,27 @@ class FileServer(ConnectionServer): time.sleep(1) taken = time.time() - s - # Query all trackers one-by-one in 20 minutes evenly distributed - sleep = max(0, 60 * 20 / len(config.trackers) - taken) - + sleep = max(0, 60 * 20 / len(config.trackers) - taken) # Query all trackers one-by-one in 20 minutes evenly distributed self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." % (taken, sleep)) time.sleep(sleep) # Detects if computer back from wakeup def wakeupWatcher(self): last_time = time.time() - last_my_ips = socket.gethostbyname_ex('')[2] while 1: time.sleep(30) - is_time_changed = time.time() - max(self.last_request, last_time) > 60 * 3 - if is_time_changed: + if time.time() - max(self.last_request, last_time) > 60 * 3: # If taken more than 3 minute then the computer was in sleep mode self.log.info( - "Wakeup detected: time warp from %0.f to %0.f (%0.f sleep seconds), acting like startup..." % + "Wakeup detected: time warp from %s to %s (%s sleep seconds), acting like startup..." 
% (last_time, time.time(), time.time() - last_time) ) - - my_ips = socket.gethostbyname_ex('')[2] - is_ip_changed = my_ips != last_my_ips - if is_ip_changed: - self.log.info("IP change detected from %s to %s" % (last_my_ips, my_ips)) - - if is_time_changed or is_ip_changed: self.checkSites(check_files=False, force_port_check=True) - last_time = time.time() - last_my_ips = my_ips # Bind and start serving sites def start(self, check_sites=True): - if self.stopping: - return False - ConnectionServer.start(self) - - try: - self.stream_server.start() - except Exception as err: - self.log.error("Error listening on: %s:%s: %s" % (self.ip, self.port, err)) - self.sites = self.site_manager.list() if config.debug: # Auto reload FileRequest on change diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 03cc1f47..b5b22436 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -115,10 +115,7 @@ class Peer(object): return self.connection def __str__(self): - if self.site: - return "Peer:%-12s of %s" % (self.ip, self.site.address_short) - else: - return "Peer:%-12s" % self.ip + return "Peer:%-12s" % self.ip def __repr__(self): return "<%s>" % self.__str__() @@ -133,12 +130,9 @@ class Peer(object): def found(self, source="other"): if self.reputation < 5: if source == "tracker": - if self.ip.endswith(".onion"): - self.reputation += 1 - else: - self.reputation += 2 + self.reputation += 1 elif source == "local": - self.reputation += 20 + self.reputation += 3 if source in ("tracker", "local"): self.site.peers_recent.appendleft(self) @@ -345,10 +339,7 @@ class Peer(object): back[hash] += list(map(unpacker_func, peers)) for hash in res.get("my", []): - if self.connection: - back[hash].append((self.connection.ip, self.connection.port)) - else: - back[hash].append((self.ip, self.port)) + back[hash].append((self.connection.ip, self.connection.port)) return back diff --git a/src/Peer/PeerPortchecker.py b/src/Peer/PeerPortchecker.py index 3c4daecf..ad1aac01 100644 --- a/src/Peer/PeerPortchecker.py +++ b/src/Peer/PeerPortchecker.py @@ -9,10 +9,6 @@ from util import UpnpPunch class PeerPortchecker(object): - checker_functions = { - "ipv4": ["checkIpfingerprints", "checkCanyouseeme"], - "ipv6": ["checkMyaddr", "checkIpv6scanner"] - } def __init__(self, file_server): self.log = logging.getLogger("PeerPortchecker") self.upnp_port_opened = False @@ -22,9 +18,7 @@ class PeerPortchecker(object): if type(post_data) is dict: post_data = urllib.parse.urlencode(post_data).encode("utf8") req = urllib.request.Request(url, post_data) - req.add_header("Referer", url) - req.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11") - req.add_header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8") + req.add_header('Referer', url) return urllib.request.urlopen(req, timeout=20.0) def portOpen(self, port): @@ -43,7 +37,10 @@ class PeerPortchecker(object): return UpnpPunch.ask_to_close_port(port, protos=["TCP"]) def portCheck(self, port, ip_type="ipv4"): - checker_functions = self.checker_functions[ip_type] + if ip_type == "ipv6": + checker_functions = ["checkMyaddr", "checkIpv6scanner"] + else: + checker_functions = ["checkPortchecker", "checkCanyouseeme"] for func_name in checker_functions: func = getattr(self, func_name) @@ -52,13 +49,13 @@ class PeerPortchecker(object): res = func(port) if res: self.log.info( - "Checked port %s (%s) using %s result: %s in %.3fs" % + "Checking port %s (%s) using %s result: %s in %.3fs" % (port, 
ip_type, func_name, res, time.time() - s) ) time.sleep(0.1) if res["opened"] and not self.file_server.had_external_incoming: res["opened"] = False - self.log.warning("Port %s:%s looks opened, but no incoming connection" % (res["ip"], port)) + self.log.warning("Port %s:%s, but no incoming connection" % (res["ip"], port)) break except Exception as err: self.log.warning( @@ -70,12 +67,11 @@ class PeerPortchecker(object): return res def checkCanyouseeme(self, port): - data = urllib.request.urlopen("https://www.canyouseeme.org/", b"ip=1.1.1.1&port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") + data = urllib.request.urlopen("http://www.canyouseeme.org/", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") + message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags - message = re.match(r'.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags - - match = re.match(r".*service on (.*?) on", message) + match = re.match(".*service on (.*?) on", message)
if match: ip = match.group(1) else: @@ -88,19 +84,41 @@ class PeerPortchecker(object): else: raise Exception("Invalid response: %s" % message) - def checkIpfingerprints(self, port): - data = self.requestUrl("https://www.ipfingerprints.com/portscan.php").read().decode("utf8") - ip = re.match(r'.*name="remoteHost".*?value="(.*?)"', data, re.DOTALL).group(1) + def checkPortchecker(self, port): + data = urllib.request.urlopen("https://portchecker.co/check", b"port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") + message = re.match('.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
- post_data = { - "remoteHost": ip, "start_port": port, "end_port": port, - "normalScan": "Yes", "scan_type": "connect2", "ping_type": "none" - } - message = self.requestUrl("https://www.ipfingerprints.com/scripts/getPortsInfo.php", post_data).read().decode("utf8") + match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) + if match: + ip = match.group(1) + else: + raise Exception("Invalid response: %s" % message) if "open" in message: return {"ip": ip, "opened": True} - elif "filtered" in message or "closed" in message: + elif "closed" in message: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message) + + def checkSubnetonline(self, port): + url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php" + + data = self.requestUrl(url).read().decode("utf8") + + ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) + token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) + + post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."} + data = self.requestUrl(url, post_data).read().decode("utf8") + + message = re.match(".*<div id='result'>(.*?)</div>", data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
+ + if "online" in message: + return {"ip": ip, "opened": True} + elif "closed" in message: return {"ip": ip, "opened": False} else: raise Exception("Invalid response: %s" % message) @@ -110,12 +128,12 @@ class PeerPortchecker(object): data = self.requestUrl(url).read().decode("utf8") - ip = re.match(r'.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1) + ip = re.match('.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1) post_data = {"addr": ip, "ports_selected": "", "ports_list": port} data = self.requestUrl(url, post_data).read().decode("utf8") - message = re.match(r".*<table class='table_font_16'>(.*?)</table>", data, re.DOTALL).group(1) + message = re.match(".*<table class='table_font_16'>(.*?)</table>", data, re.DOTALL).group(1)
if "ok.png" in message: return {"ip": ip, "opened": True} @@ -129,12 +147,12 @@ class PeerPortchecker(object): data = self.requestUrl(url).read().decode("utf8") - ip = re.match(r'.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1) + ip = re.match('.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1) post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"} data = self.requestUrl(url, post_data).read().decode("utf8") - message = re.match(r".*<table id=\"scantable\">(.*?)</table>", data, re.DOTALL).group(1) + message = re.match(".*<table id=\"scantable\">(.*?)</table>", data, re.DOTALL).group(1) message_text = re.sub("<.*?>", " ", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
if "OPEN" in message_text: @@ -144,46 +162,9 @@ class PeerPortchecker(object): else: raise Exception("Invalid response: %s" % message_text) - def checkPortchecker(self, port): # Not working: Forbidden - data = self.requestUrl("https://portchecker.co").read().decode("utf8") - csrf = re.match(r'.*name="_csrf" value="(.*?)"', data, re.DOTALL).group(1) - - data = self.requestUrl("https://portchecker.co", {"port": port, "_csrf": csrf}).read().decode("utf8") - message = re.match(r'.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
- - match = re.match(r".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) - if match: - ip = match.group(1) - else: - raise Exception("Invalid response: %s" % message) - - if "open" in message: - return {"ip": ip, "opened": True} - elif "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) - - def checkSubnetonline(self, port): # Not working: Invalid response - url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php" - - data = self.requestUrl(url).read().decode("utf8") - - ip = re.match(r'.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) - token = re.match(r'.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) - - post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."} - data = self.requestUrl(url, post_data).read().decode("utf8") - - print(post_data, data) - - message = re.match(r".*<div id='result'>(.*?)</div>", data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags
- - if "online" in message: - return {"ip": ip, "opened": True} - elif "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) +if __name__ == "__main__": + import time + peer_portchecker = PeerPortchecker() + for func_name in ["checkIpv6scanner", "checkMyaddr", "checkPortchecker", "checkCanyouseeme"]: + s = time.time() + print((func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s))) diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py index 56540e60..fbf37d3c 100644 --- a/src/Plugin/PluginManager.py +++ b/src/Plugin/PluginManager.py @@ -5,147 +5,62 @@ import shutil import time from collections import defaultdict -import importlib -import json - from Debug import Debug from Config import config + import plugins +import importlib + class PluginManager: def __init__(self): self.log = logging.getLogger("PluginManager") - self.path_plugins = None - if plugins.__file__: - self.path_plugins = os.path.dirname(os.path.abspath(plugins.__file__)); - self.path_installed_plugins = config.data_dir + "/__plugins__" + self.plugin_path = os.path.abspath(os.path.dirname(plugins.__file__)) self.plugins = defaultdict(list) # Registered plugins (key: class name, value: list of plugins for class) self.subclass_order = {} # Record the load order of the plugins, to keep it after reload self.pluggable = {} self.plugin_names = [] # Loaded plugin names - self.plugins_updated = {} # List of updated plugins since restart - self.plugins_rev = {} # Installed plugins revision numbers - self.after_load = [] # Execute functions after loaded plugins - self.function_flags = {} # Flag function for permissions + self.after_load = [] # Execute functions after loaded plugins self.reloading = False - self.config_path = config.data_dir + "/plugins.json" - self.loadConfig() - - self.config.setdefault("builtin", {}) - - if self.path_plugins: - sys.path.append(os.path.join(os.getcwd(), self.path_plugins)) + sys.path.append(os.path.join(os.getcwd(), self.plugin_path)) self.migratePlugins() if config.debug: # Auto reload Plugins on file change from Debug import DebugReloader DebugReloader.watcher.addCallback(self.reloadPlugins) - def loadConfig(self): - if os.path.isfile(self.config_path): - try: - self.config = json.load(open(self.config_path, encoding="utf8")) - except Exception as err: - self.log.error("Error loading %s: %s" % (self.config_path, err)) - self.config = {} - else: - self.config = {} - - def saveConfig(self): - f = open(self.config_path, "w", encoding="utf8") - json.dump(self.config, f, ensure_ascii=False, sort_keys=True, indent=2) - def migratePlugins(self): - for dir_name in os.listdir(self.path_plugins): + for dir_name in os.listdir(self.plugin_path): if dir_name == "Mute": self.log.info("Deleting deprecated/renamed plugin: %s" % dir_name) - shutil.rmtree("%s/%s" % (self.path_plugins, dir_name)) + shutil.rmtree("%s/%s" % (self.plugin_path, dir_name)) # -- Load / Unload -- - def listPlugins(self, list_disabled=False): - plugins = [] - for dir_name in sorted(os.listdir(self.path_plugins)): - dir_path = os.path.join(self.path_plugins, dir_name) - plugin_name = dir_name.replace("disabled-", "") - if dir_name.startswith("disabled"): - is_enabled = False - else: - is_enabled = True - - plugin_config = self.config["builtin"].get(plugin_name, {}) - if "enabled" in plugin_config: - is_enabled = plugin_config["enabled"] - - if dir_name == "__pycache__" or not
os.path.isdir(dir_path): - continue # skip - if dir_name.startswith("Debug") and not config.debug: - continue # Only load in debug mode if module name starts with Debug - if not is_enabled and not list_disabled: - continue # Dont load if disabled - - plugin = {} - plugin["source"] = "builtin" - plugin["name"] = plugin_name - plugin["dir_name"] = dir_name - plugin["dir_path"] = dir_path - plugin["inner_path"] = plugin_name - plugin["enabled"] = is_enabled - plugin["rev"] = config.rev - plugin["loaded"] = plugin_name in self.plugin_names - plugins.append(plugin) - - plugins += self.listInstalledPlugins(list_disabled) - return plugins - - def listInstalledPlugins(self, list_disabled=False): - plugins = [] - - for address, site_plugins in sorted(self.config.items()): - if address == "builtin": - continue - for plugin_inner_path, plugin_config in sorted(site_plugins.items()): - is_enabled = plugin_config.get("enabled", False) - if not is_enabled and not list_disabled: - continue - plugin_name = os.path.basename(plugin_inner_path) - - dir_path = "%s/%s/%s" % (self.path_installed_plugins, address, plugin_inner_path) - - plugin = {} - plugin["source"] = address - plugin["name"] = plugin_name - plugin["dir_name"] = plugin_name - plugin["dir_path"] = dir_path - plugin["inner_path"] = plugin_inner_path - plugin["enabled"] = is_enabled - plugin["rev"] = plugin_config.get("rev", 0) - plugin["loaded"] = plugin_name in self.plugin_names - plugins.append(plugin) - - return plugins - # Load all plugin def loadPlugins(self): all_loaded = True s = time.time() - if self.path_plugins is None: - return - for plugin in self.listPlugins(): - self.log.debug("Loading plugin: %s (%s)" % (plugin["name"], plugin["source"])) - if plugin["source"] != "builtin": - self.plugins_rev[plugin["name"]] = plugin["rev"] - site_plugin_dir = os.path.dirname(plugin["dir_path"]) - if site_plugin_dir not in sys.path: - sys.path.append(site_plugin_dir) + for dir_name in sorted(os.listdir(self.plugin_path)): + dir_path = os.path.join(self.plugin_path, dir_name) + if dir_name == "__pycache__": + continue # skip + if dir_name.startswith("disabled"): + continue # Dont load if disabled + if not os.path.isdir(dir_path): + continue # Dont load if not dir + if dir_name.startswith("Debug") and not config.debug: + continue # Only load in debug mode if module name starts with Debug + self.log.debug("Loading plugin: %s" % dir_name) try: - sys.modules[plugin["name"]] = __import__(plugin["dir_name"]) + __import__(dir_name) except Exception as err: - self.log.error("Plugin %s load error: %s" % (plugin["name"], Debug.formatException(err))) + self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err))) all_loaded = False - if plugin["name"] not in self.plugin_names: - self.plugin_names.append(plugin["name"]) + if dir_name not in self.plugin_names: + self.plugin_names.append(dir_name) self.log.debug("Plugins loaded in %.3fs" % (time.time() - s)) for func in self.after_load: @@ -159,23 +74,19 @@ class PluginManager: self.plugins_before = self.plugins self.plugins = defaultdict(list) # Reset registered plugins for module_name, module in list(sys.modules.items()): - if not module or not getattr(module, "__file__", None): - continue - if self.path_plugins not in module.__file__ and self.path_installed_plugins not in module.__file__: - continue - - if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled - # Re-add non-reloadable plugins - for class_name, classes in self.plugins_before.items(): - for c in 
classes: - if c.__module__ != module.__name__: - continue - self.plugins[class_name].append(c) - else: - try: - importlib.reload(module) - except Exception as err: - self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) + if module and getattr(module, "__file__", None) and self.plugin_path in module.__file__: # Module file in plugin_path + if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled + # Re-add non-reloadable plugins + for class_name, classes in self.plugins_before.items(): + for c in classes: + if c.__module__ != module.__name__: + continue + self.plugins[class_name].append(c) + else: + try: + importlib.reload(module) + except Exception as err: + self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) self.loadPlugins() # Load new plugins diff --git a/src/Site/Site.py b/src/Site/Site.py index d6179307..b8620b56 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -22,7 +22,6 @@ from .SiteStorage import SiteStorage from Crypt import CryptHash from util import helper from util import Diff -from util import GreenletManager from Plugin import PluginManager from File import FileServer from .SiteAnnouncer import SiteAnnouncer @@ -42,9 +41,8 @@ class Site(object): self.content = None # Load content.json self.peers = {} # Key: ip:port, Value: Peer.Peer - self.peers_recent = collections.deque(maxlen=150) + self.peers_recent = collections.deque(maxlen=100) self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself) - self.greenlet_manager = GreenletManager.GreenletManager() # Running greenlets self.worker_manager = WorkerManager(self) # Handle site download from other peers self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept) self.content_updated = None # Content.js update time @@ -69,6 +67,10 @@ class Site(object): self.announcer = SiteAnnouncer(self) # Announce and get peer list from other nodes + if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed) + self.settings["auth_key"] = CryptHash.random() + self.log.debug("New auth key: %s" % self.settings["auth_key"]) + if not self.settings.get("wrapper_key"): # To auth websocket permissions self.settings["wrapper_key"] = CryptHash.random() self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"]) @@ -123,7 +125,7 @@ class Site(object): if not SiteManager.site_manager.sites.get(self.address): SiteManager.site_manager.sites[self.address] = self SiteManager.site_manager.load(False) - SiteManager.site_manager.saveDelayed() + SiteManager.site_manager.save() def isServing(self): if config.offline: @@ -143,24 +145,18 @@ class Site(object): # Next size limit based on current size def getNextSizeLimit(self): - size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] + size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] size = self.settings.get("size", 0) for size_limit in size_limits: if size * 1.2 < size_limit * 1024 * 1024: return size_limit return 999999 - def isAddedRecently(self): - return time.time() - self.settings.get("added", 0) < 60 * 60 * 24 - # Download all file from content.json def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False, diffs={}): s = time.time() if config.verbose: - self.log.debug( - "DownloadContent %s: Started. (download_files: %s, check_modifications: %s, diffs: %s)..." 
% - (inner_path, download_files, check_modifications, diffs.keys()) - ) + self.log.debug("Downloading %s..." % inner_path) if not inner_path.endswith("content.json"): return False @@ -168,35 +164,21 @@ class Site(object): found = self.needFile(inner_path, update=self.bad_files.get(inner_path)) content_inner_dir = helper.getDirname(inner_path) if not found: - self.log.debug("DownloadContent %s: Download failed, check_modifications: %s" % (inner_path, check_modifications)) + self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications)) if check_modifications: # Download failed, but check modifications if its succed later self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications") return False # Could not download content.json if config.verbose: - self.log.debug("DownloadContent got %s" % inner_path) - sub_s = time.time() - + self.log.debug("Got %s" % inner_path) changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False) - if config.verbose: - self.log.debug("DownloadContent %s: loadContent done in %.3fs" % (inner_path, time.time() - sub_s)) - if inner_path == "content.json": self.saveSettings() if peer: # Update last received update from peer to prevent re-sending the same update to it peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"] - # Verify size limit - if inner_path == "content.json": - site_size_limit = self.getSizeLimit() * 1024 * 1024 - content_size = len(json.dumps(self.content_manager.contents[inner_path], indent=1)) + sum([file["size"] for file in list(self.content_manager.contents[inner_path].get("files", {}).values()) if file["size"] >= 0]) # Size of new content - if site_size_limit < content_size: - # Not enought don't download anything - self.log.debug("DownloadContent Size limit reached (site too big please increase limit): %.2f MB > %.2f MB" % (content_size / 1024 / 1024, site_size_limit / 1024 / 1024)) - return False - # Start download files file_threads = [] if download_files: @@ -228,11 +210,11 @@ class Site(object): time_on_done = time.time() - s self.log.debug( - "DownloadContent Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % + "Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % (file_inner_path, time_diff, time_verify, time_write, time_on_done) ) except Exception as err: - self.log.debug("DownloadContent Failed to patch %s: %s" % (file_inner_path, err)) + self.log.debug("Failed to patch %s: %s" % (file_inner_path, err)) diff_success = False if not diff_success: @@ -266,21 +248,22 @@ class Site(object): include_threads.append(include_thread) if config.verbose: - self.log.debug("DownloadContent %s: Downloading %s includes..." % (inner_path, len(include_threads))) + self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads))) gevent.joinall(include_threads) if config.verbose: - self.log.debug("DownloadContent %s: Includes download ended" % inner_path) + self.log.debug("%s: Includes download ended" % inner_path) if check_modifications: # Check if every file is up-to-date self.checkModifications(0) if config.verbose: - self.log.debug("DownloadContent %s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed))) + self.log.debug("%s: Downloading %s files, changed: %s..." 
% (inner_path, len(file_threads), len(changed))) gevent.joinall(file_threads) if config.verbose: - self.log.debug("DownloadContent %s: ended in %.3fs (tasks left: %s)" % ( - inner_path, time.time() - s, len(self.worker_manager.tasks) - )) + self.log.debug("%s: DownloadContent ended in %.3fs" % (inner_path, time.time() - s)) + + if not self.worker_manager.tasks: + self.onComplete() # No more task trigger site complete return True @@ -325,22 +308,16 @@ class Site(object): # Download all files of the site @util.Noparallel(blocking=False) - def download(self, check_size=False, blind_includes=False, retry_bad_files=True): + def download(self, check_size=False, blind_includes=False): if not self.connection_server: self.log.debug("No connection server found, skipping download") return False - s = time.time() self.log.debug( - "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s, isAddedRecently: %s" % - (self.bad_files, check_size, blind_includes, self.isAddedRecently()) + "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s" % + (self.bad_files, check_size, blind_includes) ) - - if self.isAddedRecently(): - gevent.spawn(self.announce, mode="start", force=True) - else: - gevent.spawn(self.announce, mode="update") - + gevent.spawn(self.announce, force=True) if check_size: # Check the size first valid = self.downloadContent("content.json", download_files=False) # Just download content.json files if not valid: @@ -349,9 +326,7 @@ class Site(object): # Download everything valid = self.downloadContent("content.json", check_modifications=blind_includes) - if retry_bad_files: - self.onComplete.once(lambda: self.retryBadFiles(force=True)) - self.log.debug("Download done in %.3fs" % (time.time() - s)) + self.onComplete.once(lambda: self.retryBadFiles(force=True)) return valid @@ -374,7 +349,6 @@ class Site(object): del self.bad_files[aborted_inner_path] self.worker_manager.removeSolvedFileTasks(mark_as_good=False) break - pool.join() self.log.debug("Ended downloadContent pool len: %s, skipped: %s" % (len(inner_paths), num_skipped)) def pooledDownloadFile(self, inner_paths, pool_size=100, only_if_bad=False): @@ -392,13 +366,12 @@ class Site(object): # Update worker, try to find client that supports listModifications command def updater(self, peers_try, queried, since): - threads = [] while 1: if not peers_try or len(queried) >= 3: # Stop after 3 successful query break peer = peers_try.pop(0) if config.verbose: - self.log.debug("CheckModifications: Try to get updates from: %s Left: %s" % (peer, peers_try)) + self.log.debug("Try to get updates from: %s Left: %s" % (peer, peers_try)) res = None with gevent.Timeout(20, exception=False): @@ -410,7 +383,6 @@ class Site(object): queried.append(peer) modified_contents = [] my_modified = self.content_manager.listModified(since) - num_old_files = 0 for inner_path, modified in res["modified_files"].items(): # Check if the peer has newer files than we has_newer = int(modified) > my_modified.get(inner_path, 0) has_older = int(modified) < my_modified.get(inner_path, 0) @@ -419,18 +391,13 @@ class Site(object): # We dont have this file or we have older modified_contents.append(inner_path) self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 - if has_older and num_old_files < 5: - num_old_files += 1 - self.log.debug("CheckModifications: %s client has older version of %s, publishing there (%s/5)..." % (peer, inner_path, num_old_files)) + if has_older: + self.log.debug("%s client has older version of %s, publishing there..." 
% (peer, inner_path)) gevent.spawn(self.publisher, inner_path, [peer], [], 1) if modified_contents: - self.log.debug("CheckModifications: %s new modified file from %s" % (len(modified_contents), peer)) + self.log.debug("%s new modified file from %s" % (len(modified_contents), peer)) modified_contents.sort(key=lambda inner_path: 0 - res["modified_files"][inner_path]) # Download newest first - t = gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True) - threads.append(t) - if config.verbose: - self.log.debug("CheckModifications: Waiting for %s pooledDownloadContent" % len(threads)) - gevent.joinall(threads) + gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True) # Check modified content.json files from peers and add modified files to bad_files # Return: Successfully queried peers [Peer, Peer...] @@ -442,10 +409,10 @@ class Site(object): # Wait for peers if not self.peers: - self.announce(mode="update") + self.announce() for wait in range(10): time.sleep(5 + wait) - self.log.debug("CheckModifications: Waiting for peers...") + self.log.debug("Waiting for peers...") if self.peers: break @@ -459,7 +426,7 @@ class Site(object): if config.verbose: self.log.debug( - "CheckModifications: Try to get listModifications from peers: %s, connected: %s, since: %s" % + "Try to get listModifications from peers: %s, connected: %s, since: %s" % (peers_try, peers_connected_num, since) ) @@ -476,7 +443,7 @@ class Site(object): if queried: break - self.log.debug("CheckModifications: Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since)) + self.log.debug("Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since)) time.sleep(0.1) return queried @@ -499,7 +466,7 @@ class Site(object): self.checkBadFiles() if announce: - self.announce(mode="update", force=True) + self.announce(force=True) # Full update, we can reset bad files if check_files and since == 0: @@ -516,6 +483,7 @@ class Site(object): if len(queried) == 0: # Failed to query modifications self.content_updated = False + self.bad_files["content.json"] = 1 else: self.content_updated = time.time() @@ -585,7 +553,7 @@ class Site(object): publishers = [] # Publisher threads if not self.peers: - self.announce(mode="more") + self.announce() if limit == "default": limit = 5 @@ -635,7 +603,6 @@ class Site(object): return len(published) # Copy this site - @util.Noparallel() def clone(self, address, privatekey=None, address_index=None, root_inner_path="", overwrite=False): import shutil new_site = SiteManager.site_manager.need(address, all_file=False) @@ -708,9 +675,9 @@ class Site(object): shutil.copy(file_path, file_path_dest) # If -default in path, create a -default less copy of the file - if "-default" in file_inner_path_dest: - file_path_dest = new_site.storage.getPath(file_inner_path_dest.replace("-default", "")) - if new_site.storage.isFile(file_inner_path_dest.replace("-default", "")) and not overwrite: + if "-default" in file_inner_path: + file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", "")) + if new_site.storage.isFile(file_inner_path.replace("-default", "")) and not overwrite: # Don't overwrite site files with default ones self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path) continue @@ -721,15 +688,15 @@ class Site(object): shutil.copy(file_path, file_path_dest) # Sign if content json if file_path_dest.endswith("/content.json"): - 
new_site.storage.onUpdated(file_inner_path_dest.replace("-default", "")) + new_site.storage.onUpdated(file_inner_path.replace("-default", "")) new_site.content_manager.loadContent( - file_inner_path_dest.replace("-default", ""), add_bad_files=False, + file_inner_path.replace("-default", ""), add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign(file_inner_path_dest.replace("-default", ""), privatekey, remove_missing_optional=True) + new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey, remove_missing_optional=True) new_site.content_manager.loadContent( - file_inner_path_dest, add_bad_files=False, delete_removed_files=False, load_includes=False + file_inner_path, add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: @@ -741,10 +708,7 @@ class Site(object): # Rebuild DB if new_site.storage.isFile("dbschema.json"): new_site.storage.closeDb() - try: - new_site.storage.rebuildDb() - except Exception as err: - self.log.error(err) + new_site.storage.rebuildDb() return new_site @@ -753,10 +717,6 @@ class Site(object): return self.needFile(*args, **kwargs) def isFileDownloadAllowed(self, inner_path, file_info): - # Verify space for all site - if self.settings["size"] > self.getSizeLimit() * 1024 * 1024: - return False - # Verify space for file if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024: self.log.debug( "File size %s too large: %sMB > %sMB, skipping..." % @@ -779,21 +739,15 @@ class Site(object): # Check and download if file not exist def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0): - if self.worker_manager.tasks.findTask(inner_path): - task = self.worker_manager.addTask(inner_path, peer, priority=priority) - if blocking: - return task["evt"].get() - else: - return task["evt"] - elif self.storage.isFile(inner_path) and not update: # File exist, no need to do anything + if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything return True elif not self.isServing(): # Site not serving return False else: # Wait until file downloaded + self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file if not self.content_manager.contents.get("content.json"): # No content.json, download it first! 
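# --- Editor's note: illustrative sketch only, not part of the patch. ---
# The needFile() lines removed just above return either the worker task's result
# (blocking) or the task's event object itself (non-blocking). A minimal standalone
# sketch of that gevent pattern, with hypothetical names (need_file, start_download):
import gevent
from gevent.event import AsyncResult

def need_file(start_download, blocking=True):
    evt = AsyncResult()
    gevent.spawn(lambda: evt.set(start_download()))  # a worker fills in the result
    if blocking:
        return evt.get()  # wait for the download to finish and return its result
    return evt            # caller can call .get() on the event later

# Example: need_file(lambda: True) -> True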
- self.log.debug("Need content.json first (inner_path: %s, priority: %s)" % (inner_path, priority)) - if priority > 0: - gevent.spawn(self.announce) + self.log.debug("Need content.json first") + gevent.spawn(self.announce) if inner_path != "content.json": # Prevent double download task = self.worker_manager.addTask("content.json", peer) task["evt"].get() @@ -819,8 +773,6 @@ class Site(object): self.log.debug("%s: Download not allowed" % inner_path) return False - self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file - task = self.worker_manager.addTask(inner_path, peer, priority=priority, file_info=file_info) if blocking: return task["evt"].get() @@ -850,8 +802,7 @@ class Site(object): return peer def announce(self, *args, **kwargs): - if self.isServing(): - self.announcer.announce(*args, **kwargs) + self.announcer.announce(*args, **kwargs) # Keep connections to get the updates def needConnections(self, num=None, check_site_on_reconnect=False): @@ -869,7 +820,7 @@ class Site(object): self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers))) if connected < need: # Need more than we have - for peer in self.getRecentPeers(30): + for peer in list(self.peers.values()): if not peer.connection or not peer.connection.connected: # No peer connection or disconnected peer.pex() # Initiate peer exchange if peer.connection and peer.connection.connected: @@ -895,8 +846,6 @@ class Site(object): continue # Not connectable if not peer.connection: continue # No connection - if peer.ip.endswith(".onion") and not self.connection_server.tor_manager.enabled: - continue # Onion not supported if peer.key in ignore: continue # The requester has this peer if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago @@ -921,10 +870,7 @@ class Site(object): # Return: Recently found peers def getRecentPeers(self, need_num): found = list(set(self.peers_recent)) - self.log.debug( - "Recent peers %s of %s (need: %s)" % - (len(found), len(self.peers), need_num) - ) + self.log.debug("Recent peers %s of %s (need: %s)" % (len(found), len(self.peers_recent), need_num)) if len(found) >= need_num or len(found) >= len(self.peers): return sorted( @@ -935,13 +881,8 @@ class Site(object): # Add random peers need_more = need_num - len(found) - if not self.connection_server.tor_manager.enabled: - peers = [peer for peer in self.peers.values() if not peer.ip.endswith(".onion")] - else: - peers = list(self.peers.values()) - found_more = sorted( - peers[0:need_more * 50], + list(self.peers.values())[0:need_more * 50], key=lambda peer: peer.reputation, reverse=True )[0:need_more * 2] @@ -1061,22 +1002,14 @@ class Site(object): return self.settings.get("autodownloadoptional") def delete(self): - self.log.info("Deleting site...") - s = time.time() self.settings["serving"] = False - self.settings["deleting"] = True self.saveSettings() - num_greenlets = self.greenlet_manager.stopGreenlets("Site %s deleted" % self.address) self.worker_manager.running = False - num_workers = self.worker_manager.stopWorkers() - SiteManager.site_manager.delete(self.address) - self.content_manager.contents.db.deleteSite(self) - self.updateWebsocket(deleted=True) + self.worker_manager.stopWorkers() self.storage.deleteFiles() - self.log.info( - "Deleted site in %.3fs (greenlets: %s, workers: %s)" % - (time.time() - s, num_greenlets, num_workers) - ) + self.updateWebsocket(deleted=True) + self.content_manager.contents.db.deleteSite(self) + 
SiteManager.site_manager.delete(self.address) # - Events - diff --git a/src/Site/SiteAnnouncer.py b/src/Site/SiteAnnouncer.py index 2fd63e82..f066a033 100644 --- a/src/Site/SiteAnnouncer.py +++ b/src/Site/SiteAnnouncer.py @@ -1,16 +1,22 @@ import random import time import hashlib +import urllib.request +import struct +import socket import re import collections +import bencode +from lib.subtl.subtl import UdpTrackerClient +import socks +import sockshandler import gevent from Plugin import PluginManager from Config import config from Debug import Debug from util import helper -from greenlet import GreenletExit import util @@ -35,12 +41,12 @@ class SiteAnnouncer(object): def getSupportedTrackers(self): trackers = self.getTrackers() + if config.disable_udp or config.trackers_proxy != "disable": + trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")] if not self.site.connection_server.tor_manager.enabled: trackers = [tracker for tracker in trackers if ".onion" not in tracker] - trackers = [tracker for tracker in trackers if self.getAddressParts(tracker)] # Remove trackers with unknown address - if "ipv6" not in self.site.connection_server.supported_ip_types: trackers = [tracker for tracker in trackers if helper.getIpType(self.getAddressParts(tracker)["ip"]) != "ipv6"] @@ -61,7 +67,7 @@ class SiteAnnouncer(object): def getOpenedServiceTypes(self): back = [] # Type of addresses they can reach me - if config.trackers_proxy == "disable" and config.tor != "always": + if config.trackers_proxy == "disable": for ip_type, opened in list(self.site.connection_server.port_opened.items()): if opened: back.append(ip_type) @@ -93,12 +99,11 @@ class SiteAnnouncer(object): for tracker in trackers: # Start announce threads tracker_stats = global_stats[tracker] # Reduce the announce time for trackers that looks unreliable - time_announce_allowed = time.time() - 60 * min(30, tracker_stats["num_error"]) - if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time_announce_allowed and not force: + if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time.time() - 60 * min(30, tracker_stats["num_error"]): if config.verbose: self.site.log.debug("Tracker %s looks unreliable, announce skipped (error: %s)" % (tracker, tracker_stats["num_error"])) continue - thread = self.site.greenlet_manager.spawn(self.announceTracker, tracker, mode=mode) + thread = gevent.spawn(self.announceTracker, tracker, mode=mode) threads.append(thread) thread.tracker = tracker @@ -138,7 +143,7 @@ class SiteAnnouncer(object): self.site.log.error("Announce to %s trackers in %.3fs, failed" % (len(threads), time.time() - s)) if len(threads) == 1 and mode != "start": # Move to next tracker self.site.log.debug("Tracker failed, skipping to next one...") - self.site.greenlet_manager.spawnLater(1.0, self.announce, force=force, mode=mode, pex=pex) + gevent.spawn_later(1.0, self.announce, force=force, mode=mode, pex=pex) self.updateWebsocket(trackers="announced") @@ -152,7 +157,15 @@ class SiteAnnouncer(object): self.updateWebsocket(pex="announced") def getTrackerHandler(self, protocol): - return None + if protocol == "udp": + handler = self.announceTrackerUdp + elif protocol == "http": + handler = self.announceTrackerHttp + elif protocol == "https": + handler = self.announceTrackerHttps + else: + handler = None + return handler def getAddressParts(self, tracker): if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker): @@ -210,12 +223,10 @@ class SiteAnnouncer(object): 
self.stats[tracker]["time_status"] = time.time() self.stats[tracker]["last_error"] = str(error) self.stats[tracker]["time_last_error"] = time.time() - if self.site.connection_server.has_internet: - self.stats[tracker]["num_error"] += 1 + self.stats[tracker]["num_error"] += 1 self.stats[tracker]["num_request"] += 1 global_stats[tracker]["num_request"] += 1 - if self.site.connection_server.has_internet: - global_stats[tracker]["num_error"] += 1 + global_stats[tracker]["num_error"] += 1 self.updateWebsocket(tracker="error") return False @@ -255,6 +266,112 @@ class SiteAnnouncer(object): ) return time.time() - s + def announceTrackerUdp(self, tracker_address, mode="start", num_want=10): + s = time.time() + if config.disable_udp: + raise AnnounceError("Udp disabled by config") + if config.trackers_proxy != "disable": + raise AnnounceError("Udp trackers not available with proxies") + + ip, port = tracker_address.split("/")[0].split(":") + tracker = UdpTrackerClient(ip, int(port)) + if helper.getIpType(ip) in self.getOpenedServiceTypes(): + tracker.peer_port = self.fileserver_port + else: + tracker.peer_port = 0 + tracker.connect() + if not tracker.poll_once(): + raise AnnounceError("Could not connect") + tracker.announce(info_hash=self.site.address_sha1, num_want=num_want, left=431102370) + back = tracker.poll_once() + if not back: + raise AnnounceError("No response after %.0fs" % (time.time() - s)) + elif type(back) is dict and "response" in back: + peers = back["response"]["peers"] + else: + raise AnnounceError("Invalid response: %r" % back) + + return peers + + def httpRequest(self, url): + headers = { + 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11', + 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', + 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', + 'Accept-Encoding': 'none', + 'Accept-Language': 'en-US,en;q=0.8', + 'Connection': 'keep-alive' + } + + req = urllib.request.Request(url, headers=headers) + + if config.trackers_proxy == "tor": + tor_manager = self.site.connection_server.tor_manager + handler = sockshandler.SocksiPyHandler(socks.SOCKS5, tor_manager.proxy_ip, tor_manager.proxy_port) + opener = urllib.request.build_opener(handler) + return opener.open(req, timeout=50) + elif config.trackers_proxy == "disable": + return urllib.request.urlopen(req, timeout=25) + else: + proxy_ip, proxy_port = config.trackers_proxy.split(":") + handler = sockshandler.SocksiPyHandler(socks.SOCKS5, proxy_ip, int(proxy_port)) + opener = urllib.request.build_opener(handler) + return opener.open(req, timeout=50) + + def announceTrackerHttps(self, *args, **kwargs): + kwargs["protocol"] = "https" + return self.announceTrackerHttp(*args, **kwargs) + + def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protocol="http"): + tracker_ip, tracker_port = tracker_address.rsplit(":", 1) + if helper.getIpType(tracker_ip) in self.getOpenedServiceTypes(): + port = self.fileserver_port + else: + port = 1 + params = { + 'info_hash': self.site.address_sha1, + 'peer_id': self.peer_id, 'port': port, + 'uploaded': 0, 'downloaded': 0, 'left': 431102370, 'compact': 1, 'numwant': num_want, + 'event': 'started' + } + + url = protocol + "://" + tracker_address + "?" 
+ urllib.parse.urlencode(params) + + s = time.time() + response = None + # Load url + if config.tor == "always" or config.trackers_proxy != "disable": + timeout = 60 + else: + timeout = 30 + + with gevent.Timeout(timeout, False): # Make sure of timeout + req = self.httpRequest(url) + response = req.read() + req.close() + req = None + + if not response: + raise AnnounceError("No response after %.0fs" % (time.time() - s)) + + # Decode peers + try: + peer_data = bencode.decode(response)["peers"] + if type(peer_data) is not bytes: + peer_data = peer_data.encode() + response = None + peer_count = int(len(peer_data) / 6) + peers = [] + for peer_offset in range(peer_count): + off = 6 * peer_offset + peer = peer_data[off:off + 6] + addr, port = struct.unpack('!LH', peer) + peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port}) + except Exception as err: + raise AnnounceError("Invalid response: %r (%s)" % (response, Debug.formatException(err))) + + return peers + @util.Noparallel(blocking=False) def announcePex(self, query_num=2, need_num=5): peers = self.site.getConnectedPeers() @@ -263,7 +380,7 @@ class SiteAnnouncer(object): peers = self.site.getConnectedPeers() if len(peers) == 0: # Small number of connected peers for this site, connect to any - peers = list(self.site.getRecentPeers(20)) + peers = list(self.site.peers.values()) need_num = 10 random.shuffle(peers) @@ -277,8 +394,6 @@ class SiteAnnouncer(object): if num_added: self.site.worker_manager.onPeers() self.site.updateWebsocket(peers_added=num_added) - else: - time.sleep(0.1) if done == query_num: break self.site.log.debug("Pex result: from %s peers got %s new peers." % (done, total_added)) diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index 684d69fc..40724866 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -7,13 +7,10 @@ import atexit import gevent -import util from Plugin import PluginManager from Content import ContentDb from Config import config from util import helper -from util import RateLimit -from util import Cached @PluginManager.acceptPlugins @@ -28,25 +25,14 @@ class SiteManager(object): atexit.register(lambda: self.save(recalculate_size=True)) # Load all sites from data/sites.json - @util.Noparallel() def load(self, cleanup=True, startup=False): - from Debug import Debug - self.log.info("Loading sites... 
(cleanup: %s, startup: %s)" % (cleanup, startup)) + self.log.debug("Loading sites...") self.loaded = False from .Site import Site address_found = [] added = 0 - load_s = time.time() # Load new adresses - try: - json_path = "%s/sites.json" % config.data_dir - data = json.load(open(json_path)) - except Exception as err: - raise Exception("Unable to load %s: %s" % (json_path, err)) - - sites_need = [] - - for address, settings in data.items(): + for address, settings in json.load(open("%s/sites.json" % config.data_dir)).items(): if address not in self.sites: if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): # Root content.json exists, try load site @@ -63,7 +49,7 @@ class SiteManager(object): elif startup: # No site directory, start download self.log.debug("Found new site in sites.json: %s" % address) - sites_need.append([address, settings]) + gevent.spawn(self.need, address, settings=settings) added += 1 address_found.append(address) @@ -79,7 +65,7 @@ class SiteManager(object): content_db = ContentDb.getContentDb() for row in content_db.execute("SELECT * FROM site").fetchall(): address = row["address"] - if address not in self.sites and address not in address_found: + if address not in self.sites: self.log.info("Deleting orphan site from content.db: %s" % address) try: @@ -92,14 +78,9 @@ class SiteManager(object): if address in content_db.sites: del content_db.sites[address] - self.loaded = True - for address, settings in sites_need: - gevent.spawn(self.need, address, settings=settings) if added: - self.log.info("Added %s sites in %.3fs" % (added, time.time() - load_s)) - - def saveDelayed(self): - RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save) + self.log.debug("SiteManager added %s sites" % added) + self.loaded = True def save(self, recalculate_size=False): if not self.sites: @@ -112,7 +93,7 @@ class SiteManager(object): data = {} # Generate data file s = time.time() - for address, site in list(self.list().items()): + for address, site in self.list().items(): if recalculate_size: site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size data[address] = site.settings @@ -121,7 +102,7 @@ class SiteManager(object): s = time.time() if data: - helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8")) + helper.atomicWrite("%s/sites.json" % config.data_dir, json.dumps(data, indent=2, sort_keys=True).encode()) else: self.log.debug("Save error: No data") time_write = time.time() - s @@ -144,67 +125,41 @@ class SiteManager(object): def isDomain(self, address): return False - @Cached(timeout=10) - def isDomainCached(self, address): - return self.isDomain(address) - - def resolveDomain(self, domain): - return False - - @Cached(timeout=10) - def resolveDomainCached(self, domain): - return self.resolveDomain(domain) - # Return: Site object or None if not found def get(self, address): - if self.isDomainCached(address): - address_resolved = self.resolveDomainCached(address) - if address_resolved: - address = address_resolved - if not self.loaded: # Not loaded yet self.log.debug("Loading site: %s)..." 
% address) self.load() - site = self.sites.get(address) - - return site - - def add(self, address, all_file=True, settings=None, **kwargs): - from .Site import Site - self.sites_changed = int(time.time()) - # Try to find site with differect case - for recover_address, recover_site in list(self.sites.items()): - if recover_address.lower() == address.lower(): - return recover_site - - if not self.isAddress(address): - return False # Not address: %s % address - self.log.debug("Added new site: %s" % address) - config.loadTrackersFile() - site = Site(address, settings=settings) - self.sites[address] = site - if not site.settings["serving"]: # Maybe it was deleted before - site.settings["serving"] = True - site.saveSettings() - if all_file: # Also download user files on first sync - site.download(check_size=True, blind_includes=True) - return site + return self.sites.get(address) # Return or create site and start download site files - def need(self, address, *args, **kwargs): - if self.isDomainCached(address): - address_resolved = self.resolveDomainCached(address) - if address_resolved: - address = address_resolved - + def need(self, address, all_file=True, settings=None): + from .Site import Site site = self.get(address) if not site: # Site not exist yet - site = self.add(address, *args, **kwargs) + self.sites_changed = int(time.time()) + # Try to find site with differect case + for recover_address, recover_site in list(self.sites.items()): + if recover_address.lower() == address.lower(): + return recover_site + + if not self.isAddress(address): + return False # Not address: %s % address + self.log.debug("Added new site: %s" % address) + config.loadTrackersFile() + site = Site(address, settings=settings) + self.sites[address] = site + if not site.settings["serving"]: # Maybe it was deleted before + site.settings["serving"] = True + site.saveSettings() + if all_file: # Also download user files on first sync + site.download(check_size=True, blind_includes=True) + return site def delete(self, address): self.sites_changed = int(time.time()) - self.log.debug("Deleted site: %s" % address) + self.log.debug("SiteManager deleted site: %s" % address) del(self.sites[address]) # Delete from sites.json self.save() diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py index 27032e79..d25e5a3f 100644 --- a/src/Site/SiteStorage.py +++ b/src/Site/SiteStorage.py @@ -3,7 +3,7 @@ import re import shutil import json import time -import errno +import sys from collections import defaultdict import sqlite3 @@ -15,16 +15,10 @@ from Db.Db import Db from Debug import Debug from Config import config from util import helper -from util import ThreadPool from Plugin import PluginManager from Translate import translate as _ -thread_pool_fs_read = ThreadPool.ThreadPool(config.threads_fs_read, name="FS read") -thread_pool_fs_write = ThreadPool.ThreadPool(config.threads_fs_write, name="FS write") -thread_pool_fs_batch = ThreadPool.ThreadPool(1, name="FS batch") - - @PluginManager.acceptPlugins class SiteStorage(object): def __init__(self, site, allow_create=True): @@ -44,14 +38,11 @@ class SiteStorage(object): raise Exception("Directory not exists: %s" % self.directory) def getDbFile(self): - if self.db: - return self.db.schema["db_file"] + if self.isFile("dbschema.json"): + schema = self.loadJson("dbschema.json") + return schema["db_file"] else: - if self.isFile("dbschema.json"): - schema = self.loadJson("dbschema.json") - return schema["db_file"] - else: - return False + return False # Create new databaseobject with 
the site's schema def openDb(self, close_idle=False): @@ -59,65 +50,46 @@ class SiteStorage(object): db_path = self.getPath(schema["db_file"]) return Db(schema, db_path, close_idle=close_idle) - def closeDb(self, reason="Unknown (SiteStorage)"): + def closeDb(self): if self.db: - self.db.close(reason) + self.db.close() self.event_db_busy = None self.db = None def getDbSchema(self): try: - self.site.needFile("dbschema.json") schema = self.loadJson("dbschema.json") except Exception as err: raise Exception("dbschema.json is not a valid JSON: %s" % err) return schema - def loadDb(self): - self.log.debug("No database, waiting for dbschema.json...") - self.site.needFile("dbschema.json", priority=3) - self.log.debug("Got dbschema.json") - self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist - if self.has_db: - schema = self.getDbSchema() - db_path = self.getPath(schema["db_file"]) - if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: - try: - self.rebuildDb(reason="Missing database") - except Exception as err: - self.log.error(err) - pass + # Return db class + def getDb(self): + if not self.db: + self.log.debug("No database, waiting for dbschema.json...") + self.site.needFile("dbschema.json", priority=3) + self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist + if self.has_db: + schema = self.getDbSchema() + db_path = self.getPath(schema["db_file"]) + if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: + self.rebuildDb() + + if self.db: + self.db.close() + self.db = self.openDb(close_idle=True) - if self.db: - self.db.close("Gettig new db for SiteStorage") - self.db = self.openDb(close_idle=True) - try: changed_tables = self.db.checkTables() if changed_tables: - self.rebuildDb(delete_db=False, reason="Changed tables") # TODO: only update the changed table datas - except sqlite3.OperationalError: - pass + self.rebuildDb(delete_db=False) # TODO: only update the changed table datas - # Return db class - @util.Noparallel() - def getDb(self): - if self.event_db_busy: # Db not ready for queries - self.log.debug("Wating for db...") - self.event_db_busy.get() # Wait for event - if not self.db: - self.loadDb() return self.db def updateDbFile(self, inner_path, file=None, cur=None): path = self.getPath(inner_path) - if cur: - db = cur.db - else: - db = self.getDb() - return db.updateJson(path, file, cur) + return self.getDb().updateJson(path, file, cur) # Return possible db files for the site - @thread_pool_fs_read.wrap def getDbFiles(self): found = 0 for content_inner_path, content in self.site.content_manager.contents.items(): @@ -125,7 +97,7 @@ class SiteStorage(object): if self.isFile(content_inner_path): yield content_inner_path, self.getPath(content_inner_path) else: - self.log.debug("[MISSING] %s" % content_inner_path) + self.log.error("[MISSING] %s" % content_inner_path) # Data files in content.json content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): @@ -136,16 +108,15 @@ class SiteStorage(object): if self.isFile(file_inner_path): yield file_inner_path, self.getPath(file_inner_path) else: - self.log.debug("[MISSING] %s" % file_inner_path) + self.log.error("[MISSING] %s" % file_inner_path) found += 1 if found % 100 == 0: time.sleep(0.001) # Context switch to avoid UI block # Rebuild sql cache @util.Noparallel() - @thread_pool_fs_batch.wrap - def rebuildDb(self, delete_db=True, 
reason="Unknown"): - self.log.info("Rebuilding db (reason: %s)..." % reason) + def rebuildDb(self, delete_db=True): + self.log.info("Rebuilding db...") self.has_db = self.isFile("dbschema.json") if not self.has_db: return False @@ -154,7 +125,7 @@ class SiteStorage(object): db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path) and delete_db: if self.db: - self.closeDb("rebuilding") # Close db if open + self.closeDb() # Close db if open time.sleep(0.5) self.log.info("Deleting %s" % db_path) try: @@ -166,28 +137,25 @@ class SiteStorage(object): self.db = self.openDb() self.event_db_busy = gevent.event.AsyncResult() - self.log.info("Rebuild: Creating tables...") - - # raise DbTableError if not valid - self.db.checkTables() - + self.log.info("Creating tables...") + changed_tables = self.db.checkTables() + if not changed_tables: + # It failed + # "Error creating table..." + return False cur = self.db.getCursor() cur.logging = False s = time.time() - self.log.info("Rebuild: Getting db files...") + self.log.info("Getting db files...") db_files = list(self.getDbFiles()) num_imported = 0 num_total = len(db_files) num_error = 0 - self.log.info("Rebuild: Importing data...") + self.log.info("Importing data...") try: if num_total > 100: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - "0000", num_total, num_error - ), "rebuild", 0 - ) + self.site.messageWebsocket(_["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format("0000", num_total, num_error), "rebuild", 0) for file_inner_path, file_path in db_files: try: if self.updateDbFile(file_inner_path, file=open(file_path, "rb"), cur=cur): @@ -198,25 +166,19 @@ class SiteStorage(object): if num_imported and num_imported % 100 == 0: self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - num_imported, num_total, num_error - ), - "rebuild", int(float(num_imported) / num_total * 100) + _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format(num_imported, num_total, num_error), + "rebuild", + int(float(num_imported) / num_total * 100) ) time.sleep(0.001) # Context switch to avoid UI block finally: cur.close() if num_total > 100: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - num_imported, num_total, num_error - ), "rebuild", 100 - ) - self.log.info("Rebuild: Imported %s data file in %.3fs" % (num_imported, time.time() - s)) + self.site.messageWebsocket(_["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format(num_imported, num_total, num_error), "rebuild", 100) + self.log.info("Imported %s data file in %.3fs" % (num_imported, time.time() - s)) self.event_db_busy.set(True) # Event done, notify waiters self.event_db_busy = None # Clear event - self.db.commit("Rebuilt") return True @@ -225,51 +187,42 @@ class SiteStorage(object): if not query.strip().upper().startswith("SELECT"): raise Exception("Only SELECT query supported") + if self.event_db_busy: # Db not ready for queries + self.log.debug("Wating for db...") + self.event_db_busy.get() # Wait for event try: res = self.getDb().execute(query, params) except sqlite3.DatabaseError as err: if err.__class__.__name__ == "DatabaseError": self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query)) - try: - self.rebuildDb(reason="Query error") - except sqlite3.OperationalError: - pass + self.rebuildDb() res = self.db.cur.execute(query, params) else: raise err return res - def ensureDir(self, inner_path): - try: - os.makedirs(self.getPath(inner_path)) - except OSError as err: - if err.errno == errno.EEXIST: - return False - else: - raise err - return True - # Open file object def open(self, inner_path, mode="rb", create_dirs=False, **kwargs): file_path = self.getPath(inner_path) if create_dirs: - file_inner_dir = os.path.dirname(inner_path) - self.ensureDir(file_inner_dir) + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) return open(file_path, mode, **kwargs) # Open file object - @thread_pool_fs_read.wrap def read(self, inner_path, mode="rb"): - return self.open(inner_path, mode).read() + return open(self.getPath(inner_path), mode).read() - @thread_pool_fs_write.wrap - def writeThread(self, inner_path, content): + # Write content to file + def write(self, inner_path, content): file_path = self.getPath(inner_path) # Create dir if not exist - self.ensureDir(os.path.dirname(inner_path)) + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) # Write file if hasattr(content, 'read'): # File-like object - with open(file_path, "wb") as file: shutil.copyfileobj(content, file) # Write buff to disk else: # Simple string @@ -278,10 +231,7 @@ class SiteStorage(object): else: with open(file_path, "wb") as file: file.write(content) - - # Write content to file - def write(self, inner_path, content): - self.writeThread(inner_path, content) + del content self.onUpdated(inner_path) # Remove file from filesystem @@ -309,7 +259,6 @@ class SiteStorage(object): raise rename_err # List files from a directory - @thread_pool_fs_read.wrap def walk(self, dir_inner_path, ignore=None): directory = self.getPath(dir_inner_path) for root, dirs, files in os.walk(directory): @@ -342,7 +291,6 @@ class SiteStorage(object): dirs[:] = dirs_filtered # list directories in a directory - @thread_pool_fs_read.wrap def list(self, dir_inner_path): directory = self.getPath(dir_inner_path) return os.listdir(directory) @@ -350,43 +298,62 @@ class SiteStorage(object): # Site content updated def onUpdated(self, inner_path, file=None): # Update Sql cache - should_load_to_db = inner_path.endswith(".json") or inner_path.endswith(".json.gz") if inner_path == "dbschema.json": self.has_db = self.isFile("dbschema.json") # Reopen DB to check changes if self.has_db: - self.closeDb("New dbschema") - gevent.spawn(self.getDb) - elif not config.disable_db and should_load_to_db and self.has_db: # Load json file to db + self.closeDb() + 
self.getDb() + elif not config.disable_db and (inner_path.endswith(".json") or inner_path.endswith(".json.gz")) and self.has_db: # Load json file to db if config.verbose: self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file)) try: self.updateDbFile(inner_path, file) except Exception as err: self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err))) - self.closeDb("Json load error") + self.closeDb() # Load and parse json file - @thread_pool_fs_read.wrap def loadJson(self, inner_path): - try: - with self.open(inner_path, "r", encoding="utf8") as file: - return json.load(file) - except Exception as err: - self.log.warning("Json load error: %s" % Debug.formatException(err)) - return None + with self.open(inner_path, "r", encoding="utf8") as file: + return json.load(file) + + def formatJson(self, data): + content = json.dumps(data, indent=1, sort_keys=True) + + # Make it a little more compact by removing unnecessary white space + def compact_dict(match): + if "\n" in match.group(0): + return match.group(0).replace(match.group(1), match.group(1).strip()) + else: + return match.group(0) + + content = re.sub("\{(\n[^,\[\{]{10,100}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL) + + def compact_list(match): + if "\n" in match.group(0): + stripped_lines = re.sub("\n[ ]*", "", match.group(1)) + return match.group(0).replace(match.group(1), stripped_lines) + else: + return match.group(0) + + content = re.sub("\[([^\[\{]{2,300}?)\][, ]{0,2}\n", compact_list, content, flags=re.DOTALL) + + # Remove end of line whitespace + content = re.sub("(?m)[ ]+$", "", content) + return content # Write formatted json file def writeJson(self, inner_path, data): # Write to disk - self.write(inner_path, helper.jsonDumps(data).encode("utf8")) + self.write(inner_path, self.formatJson(data).encode("utf8")) # Get file size def getSize(self, inner_path): path = self.getPath(inner_path) try: return os.path.getsize(path) - except Exception: + except: return 0 # File exist @@ -407,7 +374,7 @@ class SiteStorage(object): if not inner_path: return self.directory - if "../" in inner_path: + if ".." in inner_path: raise Exception("File not allowed: %s" % inner_path) return "%s/%s" % (self.directory, inner_path) @@ -463,8 +430,7 @@ class SiteStorage(object): else: try: ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception as _err: - err = _err + except Exception as err: ok = False if not ok: @@ -541,15 +507,10 @@ class SiteStorage(object): self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check)) # Delete site's all file - @thread_pool_fs_batch.wrap def deleteFiles(self): - site_title = self.site.content_manager.contents.get("content.json", {}).get("title", self.site.address) - message_id = "delete-%s" % self.site.address - self.log.debug("Deleting files from content.json (title: %s)..." 
% site_title) - + self.log.debug("Deleting files from content.json...") files = [] # Get filenames - content_inner_paths = list(self.site.content_manager.contents.keys()) - for i, content_inner_path in enumerate(content_inner_paths): + for content_inner_path in list(self.site.content_manager.contents.keys()): content = self.site.content_manager.contents.get(content_inner_path, {}) files.append(content_inner_path) # Add normal files @@ -561,16 +522,9 @@ class SiteStorage(object): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir files.append(file_inner_path) - if i % 100 == 0: - num_files = len(files) - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Collected {num_files} files"), - message_id, (i / len(content_inner_paths)) * 25 - ) - if self.isFile("dbschema.json"): self.log.debug("Deleting db file...") - self.closeDb("Deleting site") + self.closeDb() self.has_db = False try: schema = self.loadJson("dbschema.json") @@ -580,8 +534,7 @@ class SiteStorage(object): except Exception as err: self.log.error("Db file delete error: %s" % err) - num_files = len(files) - for i, inner_path in enumerate(files): + for inner_path in files: path = self.getPath(inner_path) if os.path.isfile(path): for retry in range(5): @@ -591,46 +544,21 @@ class SiteStorage(object): except Exception as err: self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry)) time.sleep(float(retry) / 10) - if i % 100 == 0: - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Deleting file {i}/{num_files}"), - message_id, 25 + (i / num_files) * 50 - ) self.onUpdated(inner_path, False) self.log.debug("Deleting empty dirs...") - i = 0 for root, dirs, files in os.walk(self.directory, topdown=False): for dir in dirs: path = os.path.join(root, dir) - if os.path.isdir(path): - try: - i += 1 - if i % 100 == 0: - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Deleting empty directories {i}"), - message_id, 85 - ) - os.rmdir(path) - except OSError: # Not empty - pass - + if os.path.isdir(path) and os.listdir(path) == []: + os.rmdir(path) + self.log.debug("Removing %s" % path) if os.path.isdir(self.directory) and os.listdir(self.directory) == []: os.rmdir(self.directory) # Remove sites directory if empty if os.path.isdir(self.directory): self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory) - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Site deleted, but some unknown files left in the directory"), - message_id, 100 - ) return False # Some files not deleted else: - self.log.debug("Site %s data directory deleted: %s..." % (site_title, self.directory)) - - self.site.messageWebsocket( - _("Deleting site {site_title}...
    All files deleted successfully"), - message_id, 100 - ) - + self.log.debug("Site data directory deleted: %s..." % self.directory) return True # All clean diff --git a/src/Test/Spy.py b/src/Test/Spy.py index 44422550..8062d063 100644 --- a/src/Test/Spy.py +++ b/src/Test/Spy.py @@ -1,5 +1,3 @@ -import logging - class Spy: def __init__(self, obj, func_name): self.obj = obj @@ -8,12 +6,11 @@ class Spy: self.calls = [] def __enter__(self, *args, **kwargs): - logging.debug("Spy started") def loggedFunc(cls, *args, **kwargs): call = dict(enumerate(args, 1)) call[0] = cls call.update(kwargs) - logging.debug("Spy call: %s" % call) + print("Logging", call) self.calls.append(call) return self.func_original(cls, *args, **kwargs) setattr(self.obj, self.__name__, loggedFunc) diff --git a/src/Test/TestCached.py b/src/Test/TestCached.py deleted file mode 100644 index 088962c0..00000000 --- a/src/Test/TestCached.py +++ /dev/null @@ -1,59 +0,0 @@ -import time - -from util import Cached - - -class CachedObject: - def __init__(self): - self.num_called_add = 0 - self.num_called_multiply = 0 - self.num_called_none = 0 - - @Cached(timeout=1) - def calcAdd(self, a, b): - self.num_called_add += 1 - return a + b - - @Cached(timeout=1) - def calcMultiply(self, a, b): - self.num_called_multiply += 1 - return a * b - - @Cached(timeout=1) - def none(self): - self.num_called_none += 1 - return None - - -class TestCached: - def testNoneValue(self): - cached_object = CachedObject() - assert cached_object.none() is None - assert cached_object.none() is None - assert cached_object.num_called_none == 1 - time.sleep(2) - assert cached_object.none() is None - assert cached_object.num_called_none == 2 - - def testCall(self): - cached_object = CachedObject() - - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcMultiply(1, 2) == 2 - assert cached_object.calcMultiply(1, 2) == 2 - assert cached_object.num_called_add == 1 - assert cached_object.num_called_multiply == 1 - - assert cached_object.calcAdd(2, 3) == 5 - assert cached_object.calcAdd(2, 3) == 5 - assert cached_object.num_called_add == 2 - - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcMultiply(2, 3) == 6 - assert cached_object.num_called_add == 2 - assert cached_object.num_called_multiply == 2 - - time.sleep(2) - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.num_called_add == 3 diff --git a/src/Test/TestConnectionServer.py b/src/Test/TestConnectionServer.py index 82ee605c..f7f62831 100644 --- a/src/Test/TestConnectionServer.py +++ b/src/Test/TestConnectionServer.py @@ -47,7 +47,7 @@ class TestConnection: # Close connection connection.close("Test ended") client.stop() - time.sleep(0.1) + time.sleep(0.01) assert len(file_server.connections) == 0 assert file_server.num_incoming == 2 # One for file_server fixture, one for the test diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py index 7e7ca1a5..687a18d4 100644 --- a/src/Test/TestContent.py +++ b/src/Test/TestContent.py @@ -66,7 +66,7 @@ class TestContent: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Include too large" in str(err.value) + assert "Include too large" in str(err) # Reset data_dict["files"]["data.json"]["size"] = 505 @@ -78,7 +78,7 @@ class TestContent: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: 
site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "File not allowed" in str(err.value) + assert "File not allowed" in str(err) # Reset del data_dict["files"]["notallowed.exe"] @@ -94,7 +94,7 @@ class TestContent: # Bad privatekey with pytest.raises(SignError) as err: site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) - assert "Private key invalid" in str(err.value) + assert "Private key invalid" in str(err) # Good privatekey content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False) @@ -172,7 +172,7 @@ class TestContent: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong site address" in str(err.value) + assert "Wrong site address" in str(err) # Wrong inner_path data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" @@ -184,7 +184,7 @@ class TestContent: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong inner_path" in str(err.value) + assert "Wrong inner_path" in str(err) # Everything right again data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" @@ -200,7 +200,7 @@ class TestContent: inner_path = "content.json" data_dict = site.storage.loadJson(inner_path) - for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "árvzítűrő/tükörfúrógép.txt"]: + for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg"]: data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} if "sign" in data_dict: @@ -212,7 +212,7 @@ class TestContent: data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) - for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]: + for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg"]: data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} if "sign" in data_dict: @@ -224,14 +224,14 @@ class TestContent: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Invalid relative path" in str(err.value) + assert "Invalid relative path" in str(err) @pytest.mark.parametrize("key", ["ignore", "optional"]) def testSignUnsafePattern(self, site, key): site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*" with pytest.raises(UnsafePatternError) as err: site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False) - assert "Potentially unsafe" in str(err.value) + assert "Potentially unsafe" in str(err) def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib): @@ -239,35 +239,10 @@ class TestContent: with pytest.raises(UnsafePatternError) as err: with site.storage.open("data/test_include/content.json") as data: site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err.value) + assert "Potentially unsafe" in str(err) 
site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0} with pytest.raises(UnsafePatternError) as err: with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data: site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err.value) - - def testPathValidation(self, site): - assert site.content_manager.isValidRelativePath("test.txt") - assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt") - assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt") - assert site.content_manager.isValidRelativePath("тест.текст") - assert site.content_manager.isValidRelativePath("𝐮𝐧𝐢𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆") - - # Test rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names - - assert not site.content_manager.isValidRelativePath("any\\hello.txt") # \ not allowed - assert not site.content_manager.isValidRelativePath("/hello.txt") # Cannot start with / - assert not site.content_manager.isValidRelativePath("\\hello.txt") # Cannot start with \ - assert not site.content_manager.isValidRelativePath("../hello.txt") # Not allowed .. in path - assert not site.content_manager.isValidRelativePath("\0hello.txt") # NULL character - assert not site.content_manager.isValidRelativePath("\31hello.txt") # 0-31 (ASCII control characters) - assert not site.content_manager.isValidRelativePath("any/hello.txt ") # Cannot end with space - assert not site.content_manager.isValidRelativePath("any/hello.txt.") # Cannot end with dot - assert site.content_manager.isValidRelativePath(".hello.txt") # Allow start with dot - assert not site.content_manager.isValidRelativePath("any/CON") # Protected names on Windows - assert not site.content_manager.isValidRelativePath("CON/any.txt") - assert not site.content_manager.isValidRelativePath("any/lpt1.txt") - assert site.content_manager.isValidRelativePath("any/CONAN") - assert not site.content_manager.isValidRelativePath("any/CONOUT$") - assert not site.content_manager.isValidRelativePath("a" * 256) # Max 255 characters allowed + assert "Potentially unsafe" in str(err) diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py index 8e91dd3e..58b71df0 100644 --- a/src/Test/TestContentUser.py +++ b/src/Test/TestContentUser.py @@ -103,7 +103,7 @@ class TestContentUser: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err.value) + assert "Include too large" in str(err) # Give more space based on address users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} @@ -135,7 +135,7 @@ class TestContentUser: with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err.value) + assert "Include too large" in str(err) users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000 # Reset # Test max optional size exception @@ -157,7 +157,7 @@ class TestContentUser: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include optional files too large" in str(err.value) + assert "Include optional files 
too large" in str(err) data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024 # Reset # hello.exe = Not allowed @@ -169,7 +169,7 @@ class TestContentUser: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Optional file not allowed" in str(err.value) + assert "Optional file not allowed" in str(err) del data_dict["files_optional"]["hello.exe"] # Reset # Includes not allowed in user content @@ -181,7 +181,7 @@ class TestContentUser: data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Includes not allowed" in str(err.value) + assert "Includes not allowed" in str(err) def testCert(self, site): # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" @@ -238,7 +238,7 @@ class TestContentUser: "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Valid signs: 0/1" in str(err.value) + assert "Valid signs: 0/1" in str(err) del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] # Reset # Test invalid cert @@ -253,7 +253,7 @@ class TestContentUser: "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid cert" in str(err.value) + assert "Invalid cert" in str(err) # Test banned user, signed by the site owner user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( @@ -313,7 +313,7 @@ class TestContentUser: "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid domain in cert_user_id" in str(err.value) + assert "Invalid domain in cert_user_id" in str(err) # Test removed cert del user_content["cert_user_id"] @@ -330,7 +330,7 @@ class TestContentUser: "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Missing cert_user_id" in str(err.value) + assert "Missing cert_user_id" in str(err) def testCertSignersPattern(self, site): @@ -366,7 +366,7 @@ class TestContentUser: "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) + assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err) # Removed cert_signers_pattern del rules_content["user_contents"]["cert_signers_pattern"] @@ -376,7 +376,7 @@ class TestContentUser: "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) + assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err) def testNewFile(self, site): diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py index 67f383a3..dda2ca20 100644 --- a/src/Test/TestDb.py +++ b/src/Test/TestDb.py @@ -75,11 +75,6 @@ class TestDb: {"not__title": ["Test #%s" % i for i in range(50, 3000)]} ).fetchone()["num"] == 50 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"title__like": "%20%"} - ).fetchone()["num"] == 1 - # Test named parameter escaping 
assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike", diff --git a/src/Test/TestDebug.py b/src/Test/TestDebug.py deleted file mode 100644 index e3eb20b3..00000000 --- a/src/Test/TestDebug.py +++ /dev/null @@ -1,52 +0,0 @@ -from Debug import Debug -import gevent -import os -import re - -import pytest - - -class TestDebug: - @pytest.mark.parametrize("items,expected", [ - (["@/src/A/B/C.py:17"], ["A/B/C.py line 17"]), # basic test - (["@/src/Db/Db.py:17"], ["Db.py line 17"]), # path compression - (["%s:1" % __file__], ["TestDebug.py line 1"]), - (["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]), # plugins - (["@/main.py:17"], ["main.py line 17"]), # root - (["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]), # Windows paths - ([":1"], []), # importlib builtins - ([":1"], []), # importlib builtins - (["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]), # best-effort anonymization - (["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]), - (["/root/main.py:17"], ["/root/main.py line 17"]), - (["{gevent}:13"], ["/__init__.py line 13"]), # modules - (["{os}:13"], [" line 13"]), # python builtin modules - (["src/gevent/event.py:17"], ["/event.py line 17"]), # gevent-overriden __file__ - (["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]), # mutliple args - (["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]), # same file - (["{os}:1", "@/src/Db/Db.py:17"], [" line 1", "Db.py line 17"]), # builtins - (["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["/__init__.py line 1", "...", "Db.py line 17"]) - ]) - def testFormatTraceback(self, items, expected): - q_items = [] - for item in items: - file, line = item.rsplit(":", 1) - if file.startswith("@"): - file = Debug.root_dir + file[1:] - file = file.replace("{os}", os.__file__) - file = file.replace("{gevent}", gevent.__file__) - q_items.append((file, int(line))) - assert Debug.formatTraceback(q_items) == expected - - def testFormatException(self): - try: - raise ValueError("Test exception") - except Exception: - assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException()) - try: - os.path.abspath(1) - except Exception: - assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException()) - - def testFormatStack(self): - assert re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack()) diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py index 3fabc271..ef28ee96 100644 --- a/src/Test/TestFileRequest.py +++ b/src/Test/TestFileRequest.py @@ -48,12 +48,6 @@ class TestFileRequest: response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234}) assert "File size does not match" in response["error"] - # Invalid path - for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]: - for sep in ["/", "\\"]: - response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0}) - assert response["error"] == 'File read exception' - connection.close() client.stop() @@ -91,7 +85,7 @@ class TestFileRequest: def testPex(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp 
site_temp.connection_server = client connection = client.getConnection(file_server.ip, 1544) diff --git a/src/Test/TestFlag.py b/src/Test/TestFlag.py deleted file mode 100644 index 12fd8165..00000000 --- a/src/Test/TestFlag.py +++ /dev/null @@ -1,39 +0,0 @@ -import os - -import pytest - -from util.Flag import Flag - -class TestFlag: - def testFlagging(self): - flag = Flag() - @flag.admin - @flag.no_multiuser - def testFn(anything): - return anything - - assert "admin" in flag.db["testFn"] - assert "no_multiuser" in flag.db["testFn"] - - def testSubclassedFlagging(self): - flag = Flag() - class Test: - @flag.admin - @flag.no_multiuser - def testFn(anything): - return anything - - class SubTest(Test): - pass - - assert "admin" in flag.db["testFn"] - assert "no_multiuser" in flag.db["testFn"] - - def testInvalidFlag(self): - flag = Flag() - with pytest.raises(Exception) as err: - @flag.no_multiuser - @flag.unknown_flag - def testFn(anything): - return anything - assert "Invalid flag" in str(err.value) diff --git a/src/Test/TestHelper.py b/src/Test/TestHelper.py index 07644ec0..27465cba 100644 --- a/src/Test/TestHelper.py +++ b/src/Test/TestHelper.py @@ -33,7 +33,7 @@ class TestHelper: with pytest.raises(socket.error): helper.packAddress("999.1.1.1", 1) - with pytest.raises(Exception): + with pytest.raises(AssertionError): helper.unpackAddress("X") def testGetDirname(self): diff --git a/src/Test/TestNoparallel.py b/src/Test/TestNoparallel.py index 6fc4f57d..5a1320d3 100644 --- a/src/Test/TestNoparallel.py +++ b/src/Test/TestNoparallel.py @@ -4,16 +4,6 @@ import gevent import pytest import util -from util import ThreadPool - - -@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn']) -def queue_spawn(request): - thread_pool = ThreadPool.ThreadPool(10) - if request.param == "gevent.spawn": - return gevent.spawn - else: - return thread_pool.spawn class ExampleClass(object): @@ -23,7 +13,7 @@ class ExampleClass(object): @util.Noparallel() def countBlocking(self, num=5): for i in range(1, num + 1): - time.sleep(0.1) + time.sleep(0.01) self.counted += 1 return "counted:%s" % i @@ -43,20 +33,20 @@ class ExampleClass(object): class TestNoparallel: - def testBlocking(self, queue_spawn): + def testBlocking(self): obj1 = ExampleClass() obj2 = ExampleClass() # Dont allow to call again until its running and wait until its running threads = [ - queue_spawn(obj1.countBlocking), - queue_spawn(obj1.countBlocking), - queue_spawn(obj1.countBlocking), - queue_spawn(obj2.countBlocking) + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj2.countBlocking) ] assert obj2.countBlocking() == "counted:5" # The call is ignored as obj2.countBlocking already counting, but block until its finishes gevent.joinall(threads) - assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"] + assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"] # Check the return value for every call obj2.countBlocking() # Allow to call again as obj2.countBlocking finished assert obj1.counted == 5 @@ -64,6 +54,7 @@ class TestNoparallel: def testNoblocking(self): obj1 = ExampleClass() + obj2 = ExampleClass() thread1 = obj1.countNoblocking() thread2 = obj1.countNoblocking() # Ignored @@ -77,24 +68,24 @@ class TestNoparallel: obj1.countNoblocking().join() # Allow again and wait until finishes assert obj1.counted == 10 - def testQueue(self, queue_spawn): + def testQueue(self): obj1 = 
ExampleClass() - queue_spawn(obj1.countQueue, num=1) - queue_spawn(obj1.countQueue, num=1) - queue_spawn(obj1.countQueue, num=1) + gevent.spawn(obj1.countQueue, num=10) + gevent.spawn(obj1.countQueue, num=10) + gevent.spawn(obj1.countQueue, num=10) - time.sleep(0.3) - assert obj1.counted == 2 # No multi-queue supported + time.sleep(3.0) + assert obj1.counted == 20 # No multi-queue supported obj2 = ExampleClass() - queue_spawn(obj2.countQueue, num=10) - queue_spawn(obj2.countQueue, num=10) + gevent.spawn(obj2.countQueue, num=10) + gevent.spawn(obj2.countQueue, num=10) time.sleep(1.5) # Call 1 finished, call 2 still working assert 10 < obj2.counted < 20 - queue_spawn(obj2.countQueue, num=10) + gevent.spawn(obj2.countQueue, num=10) time.sleep(2.0) assert obj2.counted == 30 @@ -110,16 +101,16 @@ class TestNoparallel: gevent.joinall(threads) assert obj1.counted == 5 * 2 # Only called twice (no multi-queue allowed) - def testIgnoreClass(self, queue_spawn): + def testIgnoreClass(self): obj1 = ExampleClass() obj2 = ExampleClass() threads = [ - queue_spawn(obj1.countQueue), - queue_spawn(obj1.countQueue), - queue_spawn(obj1.countQueue), - queue_spawn(obj2.countQueue), - queue_spawn(obj2.countQueue) + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue), + gevent.spawn(obj2.countQueue), + gevent.spawn(obj2.countQueue) ] s = time.time() time.sleep(0.001) @@ -131,37 +122,11 @@ class TestNoparallel: taken = time.time() - s assert 1.2 > taken >= 1.0 # 2 * 0.5s count = ~1s - def testException(self, queue_spawn): - class MyException(Exception): - pass - + def testException(self): @util.Noparallel() def raiseException(): - raise MyException("Test error!") + raise Exception("Test error!") - with pytest.raises(MyException) as err: + with pytest.raises(Exception) as err: raiseException() - assert str(err.value) == "Test error!" - - with pytest.raises(MyException) as err: - queue_spawn(raiseException).get() - assert str(err.value) == "Test error!" - - def testMultithreadMix(self, queue_spawn): - obj1 = ExampleClass() - with ThreadPool.ThreadPool(10) as thread_pool: - s = time.time() - t1 = queue_spawn(obj1.countBlocking, 5) - time.sleep(0.01) - t2 = thread_pool.spawn(obj1.countBlocking, 5) - time.sleep(0.01) - t3 = thread_pool.spawn(obj1.countBlocking, 5) - time.sleep(0.3) - t4 = gevent.spawn(obj1.countBlocking, 5) - threads = [t1, t2, t3, t4] - for thread in threads: - assert thread.get() == "counted:5" - - time_taken = time.time() - s - assert obj1.counted == 5 - assert 0.5 < time_taken < 0.7 + assert str(err) == "Test error!" 
diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py index f57e046e..f7bdb6da 100644 --- a/src/Test/TestPeer.py +++ b/src/Test/TestPeer.py @@ -15,7 +15,7 @@ class TestPeer: def testPing(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client connection = client.getConnection(file_server.ip, 1544) @@ -34,7 +34,7 @@ class TestPeer: def testDownloadFile(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client connection = client.getConnection(file_server.ip, 1544) @@ -129,7 +129,7 @@ class TestPeer: def testFindHash(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Add file_server as peer to client diff --git a/src/Test/TestSafeRe.py b/src/Test/TestSafeRe.py index 429bde50..b8037123 100644 --- a/src/Test/TestSafeRe.py +++ b/src/Test/TestSafeRe.py @@ -15,10 +15,10 @@ class TestSafeRe: def testUnsafeMatch(self, pattern): with pytest.raises(SafeRe.UnsafePatternError) as err: SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") - assert "Potentially unsafe" in str(err.value) + assert "Potentially unsafe" in str(err) @pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"]) def testUnsafeRepetition(self, pattern): with pytest.raises(SafeRe.UnsafePatternError) as err: SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") - assert "More than" in str(err.value) + assert "More than" in str(err) diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py index cd0a4c9f..1d4ba4c1 100644 --- a/src/Test/TestSiteDownload.py +++ b/src/Test/TestSiteDownload.py @@ -3,7 +3,6 @@ import time import pytest import mock import gevent -import gevent.event import os from Connection import ConnectionServer @@ -27,16 +26,14 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net site_temp.addPeer(file_server.ip, 1544) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - - assert site_temp.storage.isFile("content.json") + site_temp.download(blind_includes=True).join(timeout=5) # Rename non-optional file os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png")) @@ -53,7 +50,7 @@ class TestSiteDownload: with Spy.Spy(FileRequest, "route") as requests: site.publish() time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download + site_temp.download(blind_includes=True).join(timeout=5) # Wait for download assert "streamFile" not in [req[1] for req in requests] content = site_temp.storage.loadJson("content.json") @@ -78,14 +75,14 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites = 
{site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net site_temp.addPeer(file_server.ip, 1544) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) assert site_temp.settings["optional_downloaded"] == 0 @@ -109,7 +106,7 @@ class TestSiteDownload: with Spy.Spy(FileRequest, "route") as requests: site.publish() time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download + site_temp.download(blind_includes=True).join(timeout=5) # Wait for download assert "streamFile" not in [req[1] for req in requests] content = site_temp.storage.loadJson("content.json") @@ -134,12 +131,12 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Download normally site_temp.addPeer(file_server.ip, 1544) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] assert not bad_files @@ -148,7 +145,7 @@ class TestSiteDownload: assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2 # Add archived data - assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"] + assert not "archived" in site.content_manager.contents["data/users/content.json"]["user_contents"] assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1) site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()} @@ -163,7 +160,7 @@ class TestSiteDownload: assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] site.publish() time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download + site_temp.download(blind_includes=True).join(timeout=5) # Wait for download # The archived content should disappear from remote client assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] @@ -182,12 +179,12 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Download normally site_temp.addPeer(file_server.ip, 1544) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] assert not bad_files @@ -212,7 +209,7 @@ class TestSiteDownload: assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] site.publish() time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download + site_temp.download(blind_includes=True).join(timeout=5) # Wait for download # The archived content should disappear from remote client assert "archived_before" in 
site_temp.content_manager.contents["data/users/content.json"]["user_contents"] @@ -239,7 +236,7 @@ class TestSiteDownload: site_temp.addPeer(file_server.ip, 1544) # Download site - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) # Download optional data/optional.txt site.storage.verifyFiles(quick_check=True) # Find what optional files we have @@ -304,7 +301,7 @@ class TestSiteDownload: # Download normal files site_temp.log.info("Start Downloading site") - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) # Download optional data/optional.txt optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt") @@ -334,7 +331,7 @@ class TestSiteDownload: assert site_temp.storage.deleteFiles() file_server_full.stop() [connection.close() for connection in file_server.connections] - site_full.content_manager.contents.db.close("FindOptional test end") + site_full.content_manager.contents.db.close() def testUpdate(self, file_server, site, site_temp): assert site.storage.directory == config.data_dir + "/" + site.address @@ -346,7 +343,7 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Don't try to find peers from the net @@ -357,8 +354,7 @@ class TestSiteDownload: site_temp.addPeer(file_server.ip, 1544) # Download site from site to site_temp - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - assert len(site_temp.bad_files) == 1 + site_temp.download(blind_includes=True).join(timeout=5) # Update file data_original = site.storage.open("data/data.json").read() @@ -376,8 +372,7 @@ class TestSiteDownload: site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") site.publish() time.sleep(0.1) - site.log.info("Downloading site") - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1 assert site_temp.storage.open("data/data.json").read() == data_new @@ -408,12 +403,9 @@ class TestSiteDownload: site.log.info("Publish new data.json with patch") with Spy.Spy(FileRequest, "route") as requests: site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - - event_done = gevent.event.AsyncResult() site.publish(diffs=diffs) - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - assert [request for request in requests if request[1] in ("getFile", "streamFile")] == [] + site_temp.download(blind_includes=True).join(timeout=5) + assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 0 assert site_temp.storage.open("data/data.json").read() == data_new @@ -427,15 +419,14 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Connect peers site_temp.addPeer(file_server.ip, 1544) # Download site from site to site_temp - assert site_temp.download(blind_includes=True, 
retry_bad_files=False).get(timeout=10) - assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] + site_temp.download(blind_includes=True).join(timeout=5) # Update file data_original = site.storage.open("data/data.json").read() @@ -454,7 +445,7 @@ class TestSiteDownload: assert "data/data.json" in diffs content_json = site.storage.loadJson("content.json") - content_json["description"] = "BigZeroBlog" * 1024 * 10 + content_json["title"] = "BigZeroBlog" * 1024 * 10 site.storage.writeJson("content.json", content_json) site.content_manager.loadContent("content.json", force=True) @@ -464,99 +455,8 @@ class TestSiteDownload: site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json site.publish(diffs=diffs) - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.download(blind_includes=True).join(timeout=5) file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")] assert len(file_requests) == 1 assert site_temp.storage.open("data/data.json").read() == data_new - assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read() - - # Test what happened if the content.json of the site is bigger than the site limit - def testHugeContentSiteUpdate(self, file_server, site, site_temp): - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - - # Connect peers - site_temp.addPeer(file_server.ip, 1544) - - # Download site from site to site_temp - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - site_temp.settings["size_limit"] = int(20 * 1024 *1024) - site_temp.saveSettings() - - # Raise limit size to 20MB on site so it can be signed - site.settings["size_limit"] = int(20 * 1024 *1024) - site.saveSettings() - - content_json = site.storage.loadJson("content.json") - content_json["description"] = "PartirUnJour" * 1024 * 1024 - site.storage.writeJson("content.json", content_json) - changed, deleted = site.content_manager.loadContent("content.json", force=True) - - # Make sure we have 2 differents content.json - assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read() - - # Generate diff - diffs = site.content_manager.getDiffs("content.json") - - # Publish with patch - site.log.info("Publish new content.json bigger than 10MB") - with Spy.Spy(FileRequest, "route") as requests: - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # verify it over 10MB - time.sleep(0.1) - site.publish(diffs=diffs) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - - assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024 - assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read() - - def testUnicodeFilename(self, file_server, site, site_temp): - assert site.storage.directory == config.data_dir + "/" + site.address - assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address - - # Init source 
server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} - site_temp.connection_server = client - site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - - site_temp.addPeer(file_server.ip, 1544) - - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) - - site.storage.write("data/img/árvíztűrő.png", b"test") - - site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") - - content = site.storage.loadJson("content.json") - assert "data/img/árvíztűrő.png" in content["files"] - assert not site_temp.storage.isFile("data/img/árvíztűrő.png") - settings_before = site_temp.settings - - with Spy.Spy(FileRequest, "route") as requests: - site.publish() - time.sleep(0.1) - assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download - assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1 - - content = site_temp.storage.loadJson("content.json") - assert "data/img/árvíztűrő.png" in content["files"] - assert site_temp.storage.isFile("data/img/árvíztűrő.png") - - assert site_temp.settings["size"] == settings_before["size"] - assert site_temp.settings["size_optional"] == settings_before["size_optional"] - - assert site_temp.storage.deleteFiles() - [connection.close() for connection in file_server.connections] diff --git a/src/Test/TestSiteStorage.py b/src/Test/TestSiteStorage.py index f11262bf..e9977e8e 100644 --- a/src/Test/TestSiteStorage.py +++ b/src/Test/TestSiteStorage.py @@ -20,6 +20,3 @@ class TestSiteStorage: # Subdir assert set(site.storage.list("data-default")) == set(["data.json", "users"]) - - def testDbRebuild(self, site): - assert site.storage.rebuildDb() diff --git a/src/Test/TestThreadPool.py b/src/Test/TestThreadPool.py deleted file mode 100644 index 5e95005e..00000000 --- a/src/Test/TestThreadPool.py +++ /dev/null @@ -1,163 +0,0 @@ -import time -import threading - -import gevent -import pytest - -from util import ThreadPool - - -class TestThreadPool: - def testExecutionOrder(self): - with ThreadPool.ThreadPool(4) as pool: - events = [] - - @pool.wrap - def blocker(): - events.append("S") - out = 0 - for i in range(10000000): - if i == 3000000: - events.append("M") - out += 1 - events.append("D") - return out - - threads = [] - for i in range(3): - threads.append(gevent.spawn(blocker)) - gevent.joinall(threads) - - assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3 - - res = blocker() - assert res == 10000000 - - def testLockBlockingSameThread(self): - lock = ThreadPool.Lock() - - s = time.time() - - def unlocker(): - time.sleep(1) - lock.release() - - gevent.spawn(unlocker) - lock.acquire(True) - lock.acquire(True, timeout=2) - - unlock_taken = time.time() - s - - assert 1.0 < unlock_taken < 1.5 - - def testLockBlockingDifferentThread(self): - lock = ThreadPool.Lock() - - def locker(): - lock.acquire(True) - time.sleep(0.5) - lock.release() - - with ThreadPool.ThreadPool(10) as pool: - threads = [ - pool.spawn(locker), - pool.spawn(locker), - gevent.spawn(locker), - pool.spawn(locker) - ] - time.sleep(0.1) - - s = time.time() - - lock.acquire(True, 5.0) - - unlock_taken = time.time() - s - - assert 1.8 < unlock_taken < 2.2 - - gevent.joinall(threads) - - def testMainLoopCallerThreadId(self): - main_thread_id = threading.current_thread().ident - with 
ThreadPool.ThreadPool(5) as pool: - def getThreadId(*args, **kwargs): - return threading.current_thread().ident - - t = pool.spawn(getThreadId) - assert t.get() != main_thread_id - - t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId)) - assert t.get() == main_thread_id - - def testMainLoopCallerGeventSpawn(self): - main_thread_id = threading.current_thread().ident - with ThreadPool.ThreadPool(5) as pool: - def waiter(): - time.sleep(1) - return threading.current_thread().ident - - def geventSpawner(): - event = ThreadPool.main_loop.call(gevent.spawn, waiter) - - with pytest.raises(Exception) as greenlet_err: - event.get() - assert str(greenlet_err.value) == "cannot switch to a different thread" - - waiter_thread_id = ThreadPool.main_loop.call(event.get) - return waiter_thread_id - - s = time.time() - waiter_thread_id = pool.apply(geventSpawner) - assert main_thread_id == waiter_thread_id - time_taken = time.time() - s - assert 0.9 < time_taken < 1.2 - - def testEvent(self): - with ThreadPool.ThreadPool(5) as pool: - event = ThreadPool.Event() - - def setter(): - time.sleep(1) - event.set("done!") - - def getter(): - return event.get() - - pool.spawn(setter) - t_gevent = gevent.spawn(getter) - t_pool = pool.spawn(getter) - s = time.time() - assert event.get() == "done!" - time_taken = time.time() - s - gevent.joinall([t_gevent, t_pool]) - - assert t_gevent.get() == "done!" - assert t_pool.get() == "done!" - - assert 0.9 < time_taken < 1.2 - - with pytest.raises(Exception) as err: - event.set("another result") - - assert "Event already has value" in str(err.value) - - def testMemoryLeak(self): - import gc - thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))] - - def worker(): - time.sleep(0.1) - return "ok" - - def poolTest(): - with ThreadPool.ThreadPool(5) as pool: - for i in range(20): - pool.spawn(worker) - - for i in range(5): - poolTest() - new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before] - #print("New objs:", new_thread_objs, "run:", num_run) - - # Make sure no threadpool object left behind - assert not new_thread_objs diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py index e6b82c1a..63ff47f9 100644 --- a/src/Test/TestTor.py +++ b/src/Test/TestTor.py @@ -4,7 +4,7 @@ import pytest import mock from File import FileServer -from Crypt import CryptTor +from Crypt import CryptRsa from Config import config @pytest.mark.usefixtures("resetSettings") @@ -34,17 +34,17 @@ class TestTor: address = tor_manager.addOnion() # Sign - sign = CryptTor.sign(b"hello", tor_manager.getPrivatekey(address)) + sign = CryptRsa.sign(b"hello", tor_manager.getPrivatekey(address)) assert len(sign) == 128 # Verify - publickey = CryptTor.privatekeyToPublickey(tor_manager.getPrivatekey(address)) + publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address)) assert len(publickey) == 140 - assert CryptTor.verify(b"hello", publickey, sign) - assert not CryptTor.verify(b"not hello", publickey, sign) + assert CryptRsa.verify(b"hello", publickey, sign) + assert not CryptRsa.verify(b"not hello", publickey, sign) # Pub to address - assert CryptTor.publickeyToOnion(publickey) == address + assert CryptRsa.publickeyToOnion(publickey) == address # Delete tor_manager.delOnion(address) @@ -117,7 +117,7 @@ class TestTor: file_server.tor_manager = tor_manager client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client.sites[site_temp.address] = 
site_temp site_temp.connection_server = client # Add file_server as peer to client diff --git a/src/Test/TestUiWebsocket.py b/src/Test/TestUiWebsocket.py deleted file mode 100644 index d2d23d03..00000000 --- a/src/Test/TestUiWebsocket.py +++ /dev/null @@ -1,11 +0,0 @@ -import sys -import pytest - -@pytest.mark.usefixtures("resetSettings") -class TestUiWebsocket: - def testPermission(self, ui_websocket): - res = ui_websocket.testAction("ping") - assert res == "pong" - - res = ui_websocket.testAction("certList") - assert "You don't have permission" in res["error"] diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py index f17c77bd..635af338 100644 --- a/src/Test/TestUpnpPunch.py +++ b/src/Test/TestUpnpPunch.py @@ -126,9 +126,9 @@ class TestUpnpPunch(object): def test_parse_for_errors_bad_rsp(self, httplib_response): rsp = httplib_response(status=500) - with pytest.raises(upnp.IGDError) as err: + with pytest.raises(upnp.IGDError) as exc: upnp._parse_for_errors(rsp) - assert 'Unable to parse' in str(err.value) + assert 'Unable to parse' in str(exc) def test_parse_for_errors_error(self, httplib_response): soap_error = ('' @@ -136,9 +136,9 @@ class TestUpnpPunch(object): 'Bad request' '') rsp = httplib_response(status=500, body=soap_error) - with pytest.raises(upnp.IGDError) as err: + with pytest.raises(upnp.IGDError) as exc: upnp._parse_for_errors(rsp) - assert 'SOAP request error' in str(err.value) + assert 'SOAP request error' in str(exc) def test_parse_for_errors_good_rsp(self, httplib_response): rsp = httplib_response(status=200) diff --git a/src/Test/TestWorkerTaskManager.py b/src/Test/TestWorkerTaskManager.py deleted file mode 100644 index eb5c4a2a..00000000 --- a/src/Test/TestWorkerTaskManager.py +++ /dev/null @@ -1,128 +0,0 @@ -import pytest - -from Worker import WorkerTaskManager -from . 
import Spy - - -class TestUiWebsocket: - def checkSort(self, tasks): # Check if it has the same order as a list sorted separately - tasks_list = list(tasks) - tasks_list.sort(key=lambda task: task["id"]) - assert tasks_list != list(tasks) - tasks_list.sort(key=lambda task: (0 - (task["priority"] - task["workers_num"] * 10), task["id"])) - assert tasks_list == list(tasks) - - def testAppendSimple(self): - tasks = WorkerTaskManager.WorkerTaskManager() - tasks.append({"id": 1, "priority": 15, "workers_num": 1, "inner_path": "file1.json"}) - tasks.append({"id": 2, "priority": 1, "workers_num": 0, "inner_path": "file2.json"}) - tasks.append({"id": 3, "priority": 8, "workers_num": 0, "inner_path": "file3.json"}) - assert [task["inner_path"] for task in tasks] == ["file3.json", "file1.json", "file2.json"] - - self.checkSort(tasks) - - def testAppendMany(self): - tasks = WorkerTaskManager.WorkerTaskManager() - for i in range(1000): - tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) - assert tasks[0]["inner_path"] == "file39.json" - assert tasks[-1]["inner_path"] == "file980.json" - - self.checkSort(tasks) - - def testRemove(self): - tasks = WorkerTaskManager.WorkerTaskManager() - for i in range(1000): - tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) - - i = 333 - task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i} - assert task in tasks - - with Spy.Spy(tasks, "indexSlow") as calls: - tasks.remove(task) - assert len(calls) == 0 - - assert task not in tasks - - # Remove non existent item - with Spy.Spy(tasks, "indexSlow") as calls: - with pytest.raises(ValueError): - tasks.remove(task) - assert len(calls) == 0 - - self.checkSort(tasks) - - def testRemoveAll(self): - tasks = WorkerTaskManager.WorkerTaskManager() - tasks_list = [] - for i in range(1000): - task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i} - tasks.append(task) - tasks_list.append(task) - - for task in tasks_list: - tasks.remove(task) - - assert len(tasks.inner_paths) == 0 - assert len(tasks) == 0 - - def testModify(self): - tasks = WorkerTaskManager.WorkerTaskManager() - for i in range(1000): - tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) - - task = tasks[333] - task["priority"] += 10 - - with pytest.raises(AssertionError): - self.checkSort(tasks) - - with Spy.Spy(tasks, "indexSlow") as calls: - tasks.updateItem(task) - assert len(calls) == 1 - - assert task in tasks - - self.checkSort(tasks) - - # Check reorder optimization - with Spy.Spy(tasks, "indexSlow") as calls: - tasks.updateItem(task, "priority", task["priority"] + 10) - assert len(calls) == 0 - - with Spy.Spy(tasks, "indexSlow") as calls: - tasks.updateItem(task, "priority", task["workers_num"] - 1) - assert len(calls) == 0 - - self.checkSort(tasks) - - def testModifySamePriority(self): - tasks = WorkerTaskManager.WorkerTaskManager() - for i in range(1000): - tasks.append({"id": i, "priority": 10, "workers_num": 5, "inner_path": "file%s.json" % i}) - - task = tasks[333] - - # Check reorder optimization - with Spy.Spy(tasks, "indexSlow") as calls: - tasks.updateItem(task, "priority", task["workers_num"] - 1) - assert len(calls) == 0 - - def testIn(self): - tasks = WorkerTaskManager.WorkerTaskManager() - - i = 1 - task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i} - - assert task not in tasks - - def 
testFindTask(self): - tasks = WorkerTaskManager.WorkerTaskManager() - for i in range(1000): - tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) - - assert tasks.findTask("file999.json") - assert not tasks.findTask("file-unknown.json") - tasks.remove(tasks.findTask("file999.json")) - assert not tasks.findTask("file999.json") diff --git a/src/Test/conftest.py b/src/Test/conftest.py index c8739086..7d5558ba 100644 --- a/src/Test/conftest.py +++ b/src/Test/conftest.py @@ -8,23 +8,15 @@ import shutil import gc import datetime import atexit -import threading -import socket import pytest import mock import gevent -if "libev" not in str(gevent.config.loop): - # Workaround for random crash when libuv used with threads - gevent.config.loop = "libev-cext" - import gevent.event from gevent import monkey monkey.patch_all(thread=False, subprocess=False) -atexit_register = atexit.register -atexit.register = lambda func: "" # Don't register shutdown functions to avoid IO error on exit def pytest_addoption(parser): parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests") @@ -46,7 +38,7 @@ else: CHROMEDRIVER_PATH = "chromedriver" SITE_URL = "http://127.0.0.1:43110" -TEST_DATA_PATH = 'src/Test/testdata' +TEST_DATA_PATH = 'src/Test/testdata' sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/../lib")) # External modules directory sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/..")) # Imports relative to src dir @@ -76,44 +68,26 @@ config.verbose = True # Use test data for unittests config.tor = "disable" # Don't start Tor client config.trackers = [] config.data_dir = TEST_DATA_PATH # Use test data for unittests -if "ZERONET_LOG_DIR" in os.environ: - config.log_dir = os.environ["ZERONET_LOG_DIR"] -config.initLogging(console_logging=False) +config.initLogging() # Set custom formatter with realative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log) -time_start = time.time() class TimeFilter(logging.Filter): - def __init__(self, *args, **kwargs): - self.time_last = time.time() - self.main_thread_id = threading.current_thread().ident - super().__init__(*args, **kwargs) def filter(self, record): - if threading.current_thread().ident != self.main_thread_id: - record.thread_marker = "T" - record.thread_title = "(Thread#%s)" % self.main_thread_id - else: - record.thread_marker = " " - record.thread_title = "" + try: + last = self.last + except AttributeError: + last = record.relativeCreated - since_last = time.time() - self.time_last - if since_last > 0.1: - line_marker = "!" 
- elif since_last > 0.02: - line_marker = "*" - elif since_last > 0.01: - line_marker = "-" - else: - line_marker = " " + delta = datetime.datetime.fromtimestamp(record.relativeCreated / 1000.0) - datetime.datetime.fromtimestamp(last / 1000.0) - since_start = time.time() - time_start - record.since_start = "%s%.3fs" % (line_marker, since_start) + record.relative = '{0:.3f}'.format(delta.seconds + delta.microseconds / 1000000.0) - self.time_last = time.time() + self.last = record.relativeCreated return True log = logging.getLogger() -fmt = logging.Formatter(fmt='%(since_start)s %(thread_marker)s %(levelname)-8s %(name)s %(message)s %(thread_title)s') +fmt = logging.Formatter(fmt='+%(relative)ss %(levelname)-8s %(name)s %(message)s') [hndl.addFilter(TimeFilter()) for hndl in log.handlers] [hndl.setFormatter(fmt) for hndl in log.handlers] @@ -131,21 +105,19 @@ from util import RateLimit from Db import Db from Debug import Debug -gevent.get_hub().NOT_ERROR += (Debug.Notify,) def cleanup(): Db.dbCloseAll() for dir_path in [config.data_dir, config.data_dir + "-temp"]: - if os.path.isdir(dir_path): - for file_name in os.listdir(dir_path): - ext = file_name.rsplit(".", 1)[-1] - if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]: - continue - file_path = dir_path + "/" + file_name - if os.path.isfile(file_path): - os.unlink(file_path) + for file_name in os.listdir(dir_path): + ext = file_name.rsplit(".", 1)[-1] + if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]: + continue + file_path = dir_path + "/" + file_name + if os.path.isfile(file_path): + os.unlink(file_path) -atexit_register(cleanup) +atexit.register(cleanup) @pytest.fixture(scope="session") def resetSettings(request): @@ -205,9 +177,10 @@ def site(request): site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net def cleanup(): - site.delete() - site.content_manager.contents.db.close("Test cleanup") - site.content_manager.contents.db.timer_check_optional.kill() + site.storage.deleteFiles() + site.content_manager.contents.db.deleteSite(site) + del SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] + site.content_manager.contents.db.close() SiteManager.site_manager.sites.clear() db_path = "%s/content.db" % config.data_dir os.unlink(db_path) @@ -215,12 +188,10 @@ def site(request): gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) request.addfinalizer(cleanup) - site.greenlet_manager.stopGreenlets() site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files if not SiteManager.site_manager.sites: SiteManager.site_manager.sites = {} SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site - site.settings["serving"] = True return site @@ -229,19 +200,17 @@ def site_temp(request): threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)] with mock.patch("Config.config.data_dir", config.data_dir + "-temp"): site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - site_temp.settings["serving"] = True site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net def cleanup(): - site_temp.delete() - site_temp.content_manager.contents.db.close("Test cleanup") - site_temp.content_manager.contents.db.timer_check_optional.kill() + site_temp.storage.deleteFiles() + site_temp.content_manager.contents.db.deleteSite(site_temp) + site_temp.content_manager.contents.db.close() db_path = "%s-temp/content.db" % 
config.data_dir os.unlink(db_path) del ContentDb.content_dbs[db_path] gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) request.addfinalizer(cleanup) - site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short) return site_temp @@ -321,16 +290,6 @@ def file_server4(request): @pytest.fixture def file_server6(request): - try: - sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) - sock.connect(("::1", 80, 1, 1)) - has_ipv6 = True - except OSError: - has_ipv6 = False - if not has_ipv6: - pytest.skip("Ipv6 not supported") - - time.sleep(0.1) file_server6 = FileServer("::1", 1544) file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c' # Fake external ip @@ -365,13 +324,10 @@ def ui_websocket(site, user): self.result = gevent.event.AsyncResult() def send(self, data): - logging.debug("WsMock: Set result (data: %s) called by %s" % (data, Debug.formatStack())) self.result.set(json.loads(data)["result"]) def getResult(self): - logging.debug("WsMock: Get result") back = self.result.get() - logging.debug("WsMock: Got result (data: %s)" % back) self.result = gevent.event.AsyncResult() return back @@ -379,8 +335,9 @@ def ui_websocket(site, user): ui_websocket = UiWebsocket(ws_mock, site, None, user, None) def testAction(action, *args, **kwargs): - ui_websocket.handleRequest({"id": 0, "cmd": action, "params": list(args) if args else kwargs}) - return ui_websocket.ws.getResult() + func = getattr(ui_websocket, "action%s" % action) + func(0, *args, **kwargs) + return ui_websocket.ws.result.get() ui_websocket.testAction = testAction return ui_websocket @@ -440,58 +397,15 @@ def db(request): db.checkTables() def stop(): - db.close("Test db cleanup") + db.close() os.unlink(db_path) request.addfinalizer(stop) return db -@pytest.fixture(params=["sslcrypto", "sslcrypto_fallback", "libsecp256k1"]) +@pytest.fixture(params=["btctools", "openssl", "libsecp256k1"]) def crypt_bitcoin_lib(request, monkeypatch): monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param) CryptBitcoin.loadLib(request.param) return CryptBitcoin - -@pytest.fixture(scope='function', autouse=True) -def logCaseStart(request): - global time_start - time_start = time.time() - logging.debug("---- Start test case: %s ----" % request._pyfuncitem) - yield None # Wait until all test done - - -# Workaround for pytest bug when logging in atexit/post-fixture handlers (I/O operation on closed file) -def workaroundPytestLogError(): - import _pytest.capture - write_original = _pytest.capture.EncodedFile.write - - def write_patched(obj, *args, **kwargs): - try: - write_original(obj, *args, **kwargs) - except ValueError as err: - if str(err) == "I/O operation on closed file": - pass - else: - raise err - - def flush_patched(obj, *args, **kwargs): - try: - obj.buffer.flush(*args, **kwargs) - except ValueError as err: - if str(err).startswith("I/O operation on closed file"): - pass - else: - raise err - - _pytest.capture.EncodedFile.write = write_patched - _pytest.capture.EncodedFile.flush = flush_patched - - -workaroundPytestLogError() - -@pytest.fixture(scope='session', autouse=True) -def disableLog(): - yield None # Wait until all test done - logging.getLogger('').setLevel(logging.getLevelName(logging.CRITICAL)) - diff --git a/src/Test/pytest.ini b/src/Test/pytest.ini index 0ffb385f..d09210d1 100644 --- a/src/Test/pytest.ini +++ b/src/Test/pytest.ini @@ -1,6 +1,5 @@ [pytest] python_files = Test*.py -addopts = -rsxX -v --durations=6 --capture=fd +addopts 
= -rsxX -v --durations=6 markers = - slow: mark a tests as slow. - webtest: mark a test as a webtest. + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/src/Tor/TorManager.py b/src/Tor/TorManager.py index 865d8fbf..e6c163d7 100644 --- a/src/Tor/TorManager.py +++ b/src/Tor/TorManager.py @@ -12,13 +12,14 @@ import atexit import gevent from Config import config - -from lib import Ed25519 -from Crypt import CryptTor - +from Crypt import CryptRsa from Site import SiteManager import socks -from gevent.lock import RLock +try: + from gevent.coros import RLock +except: + from gevent.lock import RLock +from util import helper from Debug import Debug from Plugin import PluginManager @@ -37,7 +38,6 @@ class TorManager(object): self.lock = RLock() self.starting = True self.connecting = True - self.status = None self.event_started = gevent.event.AsyncResult() if config.tor == "disable": @@ -64,7 +64,7 @@ class TorManager(object): self.starting = True try: if not self.connect(): - raise Exception(self.status) + raise Exception("No connection") self.log.debug("Tor proxy port %s check ok" % config.tor_proxy) except Exception as err: if sys.platform.startswith("win") and os.path.isfile(self.tor_exe): @@ -72,8 +72,7 @@ class TorManager(object): # Change to self-bundled Tor ports self.port = 49051 self.proxy_port = 49050 - if config.tor == "always": - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) + socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) self.enabled = True if not self.connect(): self.startTor() @@ -155,22 +154,17 @@ class TorManager(object): res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn) elif cookie_match: cookie_file = cookie_match.group(1).encode("ascii").decode("unicode_escape") - if not os.path.isfile(cookie_file) and self.tor_process: - # Workaround for tor client cookie auth file utf8 encoding bug (https://github.com/torproject/stem/issues/57) - cookie_file = os.path.dirname(self.tor_exe) + "\\data\\control_auth_cookie" auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read()) res_auth = self.send("AUTHENTICATE %s" % auth_hex.decode("utf8"), conn) else: res_auth = self.send("AUTHENTICATE", conn) - if "250 OK" not in res_auth: - raise Exception("Authenticate error %s" % res_auth) + assert "250 OK" in res_auth, "Authenticate error %s" % res_auth # Version 0.2.7.5 required because ADD_ONION support res_version = self.send("GETINFO version", conn) - version = re.search(r'version=([0-9\.]+)', res_version).group(1) - if float(version.replace(".", "0", 2)) < 207.5: - raise Exception("Tor version >=0.2.7.5 required, found: %s" % version) + version = re.search('version=([0-9\.]+)', res_version).group(1) + assert float(version.replace(".", "0", 2)) >= 207.5, "Tor version >=0.2.7.5 required, found: %s" % version self.setStatus("Connected (%s)" % res_auth) self.event_started.set(True) @@ -217,8 +211,8 @@ class TorManager(object): return False def makeOnionAndKey(self): - res = self.request("ADD_ONION NEW:ED25519-V3 port=%s" % self.fileserver_port) - match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=ED25519-V3:(.*?)[\r\n]", res, re.DOTALL) + res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port) + match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL) if match: onion_address, onion_privatekey = match.groups() return (onion_address, onion_privatekey) @@ -274,7 +268,7 @@ class TorManager(object): return self.privatekeys[address] def 
getPublickey(self, address): - return CryptTor.privatekeyToPublickey(self.privatekeys[address]) + return CryptRsa.privatekeyToPublickey(self.privatekeys[address]) def getOnion(self, site_address): if not self.enabled: diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py index e73f9be1..4163d333 100644 --- a/src/Translate/Translate.py +++ b/src/Translate/Translate.py @@ -28,7 +28,7 @@ class EscapeProxy(dict): class Translate(dict): def __init__(self, lang_dir=None, lang=None): if not lang_dir: - lang_dir = os.path.dirname(__file__) + "/languages/" + lang_dir = "src/Translate/languages/" if not lang: lang = config.language self.lang = lang @@ -94,9 +94,9 @@ class Translate(dict): def pluralize(self, value, single, multi): if value > 1: - return self[multi].format(value) - else: return self[single].format(value) + else: + return self[multi].format(value) def translateData(self, data, translate_table=None, mode="js"): if not translate_table: diff --git a/src/Translate/languages/fa.json b/src/Translate/languages/fa.json deleted file mode 100644 index e644247a..00000000 --- a/src/Translate/languages/fa.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
<br>You are a full member of the ZeroNet network!": "تبریک، درگاه {0} شما باز شده است.<br>
    شما یک عضو تمام شبکه ZeroNet هستید!", - "Tor mode active, every connection using Onion route.": "حالت Tor فعال است، هر ارتباط از مسیریابی پیاز (Onion) استفاده می‌کند.", - "Successfully started Tor onion hidden services.": "خدمات پنهان پیاز (Onion) Tor با موفقیت راه‌اندازی شد.", - "Unable to start hidden services, please check your config.": "قادر به راه‌اندازی خدمات پنهان نیستیم، لطفا تنظیمات خود را بررسی نمایید.", - "For faster connections open {0} port on your router.": "برای ارتباطات سریعتر درگاه {0} را بر روی مسیریاب (روتر) خود باز نمایید.", - "Your connection is restricted. Please, open {0} port on your router": "ارتباط شما محدود‌شده است. لطفا درگاه {0} را در مسیریاب (روتر) خود باز نمایید", - "or configure Tor to become a full member of the ZeroNet network.": "یا پیکربندی Tor را انجام دهید تا به یک عضو تمام شبکه ZeroNet تبدیل شوید.", - - "Select account you want to use in this site:": "حسابی را که می‌خواهید در این سایت استفاده کنید، انتخاب کنید:", - "currently selected": "در حال حاضر انتخاب‌شده", - "Unique to site": "مختص به سایت", - - "Content signing failed": "امضای محتوا با شکست مواجه شد", - "Content publish queued for {0:.0f} seconds.": "محتوا در صف انتشار با {0:.0f} ثانیه تاخیر قرار گرفت.", - "Content published to {0} peers.": "محتوا برای {0} تعداد همتا انتشار یافت.", - "No peers found, but your content is ready to access.": "همتایی یافت نشد، اما محتوای شما آماده دسترسی است.", - "Your network connection is restricted. Please, open {0} port": "ارتباط شبکه شما محدود‌شده است. لطفا درگاه {0} را", - "on your router to make your site accessible for everyone.": "در مسیریاب (روتر) خود باز کنید تا سایت خود را برای همه در دسترس قرار دهید.", - "Content publish failed.": "انتشار محتوا موفق نبود.", - "This file still in sync, if you write it now, then the previous content may be lost.": "این فایل همچنان همگام است، اگز شما آن را بنویسید، ممکن است محتوای قبلی از‌بین رود.", - "Write content anyway": "در هر صورت محتوا را بنویس", - "New certificate added:": "گواهی جدیدی افزوده شد:", - "You current certificate:": "گواهی فعلی شما:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "تغییرش بده به {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "گواهینامه به: {auth_type}/{auth_user_name}@{domain} تغییر پیدا کرد.", - "Site cloned": "سایت همسان‌سازی شد", - - "You have successfully changed the web interface's language!": "شما با موفقیت زبان رابط وب را تغییر دادید!", - "Due to the browser's caching, the full transformation could take some minute.": "به دلیل ذخیره‌سازی در مرور‌گر، امکان دارد تغییر شکل کامل چند دقیقه طول بکشد.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "اتصال با UiServer Websocket قطع شد. 
اتصال دوباره...", - "Connection with UiServer Websocket recovered.": "ارتباط با UiServer Websocket دوباره بر‌قرار شد.", - "UiServer Websocket error, please reload the page.": "خطای UiServer Websocket, لطفا صفحه را دوباره بارگیری کنید.", - "   Connecting...": "   برقراری ارتباط...", - "Site size: ": "حجم سایت: ", - "MB is larger than default allowed ": "MB بیشتر از پیش‌فرض مجاز است ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "سایت را باز کرده و محدوده حجم را به \" + site_info.next_size_limit + \"MB تنظیم کن", - " files needs to be downloaded": " فایل‌هایی که نیاز است، دانلود شوند", - " downloaded": " دانلود شد", - " download failed": " دانلود موفق نبود", - "Peers found: ": "چند همتا یافت شد: ", - "No peers found": "همتایی یافت نشد", - "Running out of size limit (": "عبور کرده از محدوده حجم (", - "Set limit to \" + site_info.next_size_limit + \"MB": "محدوده را به \" + site_info.next_size_limit + \"MB تنظیم کن", - "Site size limit changed to {0}MB": "محدوده حجم سایت به {0}MB تغییر کرد", - " New version of this page has just released.
<br>Reload to see the modified content.": " نسخه جدیدی از این صفحه منتشر شده است.<br>
    برای مشاهده محتوای تغییر‌یافته دوباره بارگیری نمایید.", - "This site requests permission:": "این سایت درخواست مجوز می‌کند:", - "_(Accept)": "_(پذیرفتن)" -} diff --git a/src/Translate/languages/jp.json b/src/Translate/languages/jp.json index ff10aee4..9978acc7 100644 --- a/src/Translate/languages/jp.json +++ b/src/Translate/languages/jp.json @@ -1,66 +1,82 @@ { - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "おめでとうございます。ポート {0} が開きました。これでZeroNetネットワークのメンバーです。", - "Tor mode active, every connection using Onion route.": "Torモードがアクティブです、全ての接続はOnionルートを使用します。", - "Successfully started Tor onion hidden services.": "Tor onionサービスを正常に開始しました。", - "Unable to start hidden services, please check your config.": "非表示のサービスを開始できません。設定を確認してください。", - "For faster connections open {0} port on your router.": "接続を高速化するにはルーターのポート {0} を開けてください。", - "Your connection is restricted. Please, open {0} port on your router": "接続が制限されています。ルーターのポート {0} を開けてください。", - "or configure Tor to become a full member of the ZeroNet network.": "または、TorをZeroNetネットワークのメンバーになるように設定してください。", + "Peers": "ピア", + "Connected": "接続済み", + "Connectable": "利用可能", + "Connectable peers": "ピアに接続可能", - "Select account you want to use in this site:": "このサイトで使用するアカウントを選択:", - "No certificate": "証明書がありません", - "currently selected": "現在選択中", - "Unique to site": "サイト固有", + "Data transfer": "データ転送", + "Received": "受信", + "Received bytes": "受信バイト数", + "Sent": "送信", + "Sent bytes": "送信バイト数", - "Content signing failed": "コンテンツの署名に失敗", - "Content publish queued for {0:.0f} seconds.": "コンテンツの公開は{0:.0f}秒のキューに入れられました。", - "Content published to {0}/{1} peers.": "サイトの更新を通知済 {0}/{1} ピア", - "Content published to {0} peers.": "{0}ピアに公開されたコンテンツ。", - "No peers found, but your content is ready to access.": "ピアは見つかりませんでしたが、コンテンツにアクセスする準備ができました。", - "Your network connection is restricted. Please, open {0} port": "ネットワーク接続が制限されています。ポート {0} を開いて、", - "on your router to make your site accessible for everyone.": "誰でもサイトにアクセスできるようにしてください。", - "Content publish failed.": "コンテンツの公開に失敗しました。", - "This file still in sync, if you write it now, then the previous content may be lost.": "このファイルはまだ同期しています。今すぐ書き込むと、前のコンテンツが失われる可能性があります。", - "Write content anyway": "とにかくコンテンツを書く", - "New certificate added:": "新しい証明書が追加されました:", - "You current certificate:": "現在の証明書:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} に変更", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "変更後の証明書: {auth_type}/{auth_user_name}@{domain}", - "Site cloned": "複製されたサイト", + "Files": "ファイル", + "Total": "合計", + "Image": "画像", + "Other": "その他", + "User data": "ユーザーデータ", - "You have successfully changed the web interface's language!": "Webインターフェースの言語が正常に変更されました!", - "Due to the browser's caching, the full transformation could take some minute.": "ブラウザのキャッシュにより、完全な変換には数分かかる場合があります。", + "Size limit": "サイズ制限", + "limit used": "使用上限", + "free space": "フリースペース", + "Set": "セット", - "Connection with UiServer Websocket was lost. 
Reconnecting...": "UiServer Websocketとの接続が失われました。再接続しています...", - "Connection with UiServer Websocket recovered.": "UiServer Websocketとの接続が回復しました。", - "UiServer Websocket error, please reload the page.": "UiServer Websocketエラー、ページをリロードしてください。", - "   Connecting...": "   接続しています...", - "Site size: ": "サイトサイズ: ", - "MB is larger than default allowed ": "MBはデフォルトの許容値よりも大きいです。 ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "サイトを開き、サイズ制限を \" + site_info.next_size_limit + \"MB に設定", - " files needs to be downloaded": " ファイルをダウンロードする必要があります", - " downloaded": " ダウンロード", - " download failed": " ダウンロード失敗", - "Peers found: ": "ピアが見つかりました: ", - "No peers found": "ピアが見つかりません", - "Running out of size limit (": "サイズ制限を使い果たしました (", - "Set limit to \" + site_info.next_size_limit + \"MB": "制限を \" + site_info.next_size_limit + \"MB に設定", - "Cloning site...": "サイトを複製中…", - "Site size limit changed to {0}MB": "サイトのサイズ制限が {0}MB に変更されました", - " New version of this page has just released.
<br>Reload to see the modified content.": " このページの新しいバージョンが公開されました。<br>
    変更されたコンテンツを見るには再読み込みしてください。", - "This site requests permission:": "このサイトは権限を要求しています:", - "_(Accept)": "_(許可)", - - "Save": "保存", - "Trackers announcing": "トラッカーをお知らせ", - "Error": "エラー", - "Done": "完了", - "Tracker connection error detected.": "トラッカー接続エラーが検出されました。", + "Optional files": "オプション ファイル", + "Downloaded": "ダウンロード済み", + "Download and help distribute all files": "ダウンロードしてすべてのファイルの配布を支援する", + "Total size": "合計サイズ", + "Downloaded files": "ダウンロードされたファイル", - "Update ZeroNet client to latest version?": "ZeroNetクライアントを最新版に更新しますか?", + "Database": "データベース", + "search feeds": "フィードを検索する", + "{feeds} query": "{フィード} お問い合わせ", + "Reload": "再読込", + "Rebuild": "再ビルド", + "No database found": "データベースが見つかりません", + + "Identity address": "Identity address", + "Change": "編集", + + "Site control": "サイト管理", "Update": "更新", - "Restart ZeroNet client?": "ZeroNetクライアントを再起動しますか?", - "Restart": "再起動", - "Shut down ZeroNet client?": "ZeroNetクライアントを終了しますか?", - "Shut down": "終了" -} + "Pause": "一時停止", + "Resume": "再開", + "Delete": "削除", + "Are you sure?": "本当によろしいですか?", + + "Site address": "サイトアドレス", + "Donate": "寄付する", + + "Missing files": "ファイルがありません", + "{} try": "{} 試す", + "{} tries": "{} 試行", + "+ {num_bad_files} more": "+ {num_bad_files} more", + + "This is my site": "This is my site", + "Site title": "サイトタイトル", + "Site description": "サイトの説明", + "Save site settings": "サイトの設定を保存する", + + "Content publishing": "コンテンツを公開する", + "Choose": "選択", + "Sign": "Sign", + "Publish": "公開する", + + "This function is disabled on this proxy": "この機能はこのプロキシで無効になっています", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "GeoLite2 Cityデータベースのダウンロードエラー: {}!<br>手動でダウンロードして、フォルダに解凍してください。:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Cityデータベースの読み込み (これは一度だけ行われます, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 Cityデータベースがダウンロードされました!", + + "Are you sure?": "本当によろしいですか?", + "Site storage limit modified!": "サイトの保存容量の制限が変更されました!", + "Database schema reloaded!": "データベーススキーマがリロードされました!", + "Database rebuilding....": "データベースの再構築中....", + "Database rebuilt!": "データベースが再構築されました!", + "Site updated!": "サイトが更新されました!", + "Delete this site": "このサイトを削除する", + "File write error: ": "ファイル書き込みエラー:", + "Site settings saved!": "サイト設定が保存されました!", + "Enter your private key:": "秘密鍵を入力してください:", + " Signed!": " Signed!", + "WebGL not supported": "WebGLはサポートされていません" +} \ No newline at end of file diff --git a/src/Translate/languages/pl.json b/src/Translate/languages/pl.json index 679e909d..75caeceb 100644 --- a/src/Translate/languages/pl.json +++ b/src/Translate/languages/pl.json @@ -13,8 +13,8 @@ "Content signing failed": "Podpisanie treści zawiodło", "Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).", - "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników.", - "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników, ale twoja treść jest dostępna.", + "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników równorzednych.", + "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników równorzędnych, ale twoja treść jest dostępna.", "Your network connection is restricted. Please, open {0} port": "Twoje połączenie sieciowe jest ograniczone. Proszę, otwórz port {0}", "on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogłabyć dostępna dla wszystkich.", "Content publish failed.": "Publikacja treści zawiodła.", @@ -39,16 +39,13 @@ " files needs to be downloaded": " pliki muszą zostać ściągnięte", " downloaded": " ściągnięte", " download failed": " ściąganie nie powiodło się", - "Peers found: ": "Odnaleziono użytkowników: ", - "No peers found": "Nie odnaleziono użytkowników", + "Peers found: ": "Odnaleziono użytkowników równorzednych: ", + "No peers found": "Nie odnaleziono użytkowników równorzędnych", "Running out of size limit (": "Limit rozmiaru na wyczerpaniu (", "Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBów", "Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBów", " New version of this page has just released.
<br>Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana.<br>
    Odśwież by zobaczyć nową, zmodyfikowaną treść strony.", "This site requests permission:": "Ta strona wymaga uprawnień:", - "_(Accept)": "Przyznaj uprawnienia", + "_(Accept)": "Przyznaj uprawnienia" - "Sign and publish": "Podpisz i opublikuj", - "Restart ZeroNet client?": "Uruchomić ponownie klienta ZeroNet?", - "Restart": "Uruchom ponownie" } diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index 4a4e0545..9c2376a3 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -5,7 +5,6 @@ import mimetypes import json import html import urllib -import socket import gevent @@ -26,23 +25,6 @@ status_texts = { 500: "500 Internal Server Error", } -content_types = { - "asc": "application/pgp-keys", - "css": "text/css", - "gpg": "application/pgp-encrypted", - "html": "text/html", - "js": "application/javascript", - "json": "application/json", - "oga": "audio/ogg", - "ogg": "application/ogg", - "ogv": "video/ogg", - "sig": "application/pgp-signature", - "txt": "text/plain", - "webmanifest": "application/manifest+json", - "wasm": "application/wasm", - "webp": "image/webp" -} - class SecurityError(Exception): pass @@ -86,19 +68,13 @@ class UiRequest(object): return True if self.isProxyRequest(): # Support for chrome extension proxy - if self.isDomain(host): + if self.server.site_manager.isDomain(host): return True else: return False return False - def isDomain(self, address): - return self.server.site_manager.isDomainCached(address) - - def resolveDomain(self, domain): - return self.server.site_manager.resolveDomainCached(domain) - # Call the request handler function base on path def route(self, path): # Restict Ui access by ip @@ -107,25 +83,10 @@ class UiRequest(object): # Check if host allowed to do request if not self.isHostAllowed(self.env.get("HTTP_HOST")): - ret_error = next(self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False)) - - http_get = self.env["PATH_INFO"] - if self.env["QUERY_STRING"]: - http_get += "?{0}".format(self.env["QUERY_STRING"]) - self_host = self.env["HTTP_HOST"].split(":")[0] - self_ip = self.env["HTTP_HOST"].replace(self_host, socket.gethostbyname(self_host)) - link = "http://{0}{1}".format(self_ip, http_get) - ret_body = """ -

                <h4>Start the client with --ui_host "{host}" argument</h4>
-                <h4>or access via ip: <a href="{link}">{link}</a></h4>
    - """.format( - host=html.escape(self.env["HTTP_HOST"]), - link=html.escape(link) - ).encode("utf8") - return iter([ret_error, ret_body]) + return self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False) # Prepend .bit host for transparent proxy - if self.isDomain(self.env.get("HTTP_HOST")): + if self.server.site_manager.isDomain(self.env.get("HTTP_HOST")): path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path) path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access @@ -148,8 +109,8 @@ class UiRequest(object): if path == "/": return self.actionIndex() - elif path in ("/favicon.ico", "/apple-touch-icon.png"): - return self.actionFile("src/Ui/media/img/%s" % path) + elif path == "/favicon.ico": + return self.actionFile("src/Ui/media/img/favicon.ico") # Internal functions elif "/ZeroNet-Internal/" in path: path = re.sub(".*?/ZeroNet-Internal/", "/", path) @@ -202,7 +163,7 @@ class UiRequest(object): # The request is proxied by chrome extension or a transparent proxy def isProxyRequest(self): - return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.isDomain(self.env.get("HTTP_HOST"))) + return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.server.site_manager.isDomain(self.env.get("HTTP_HOST"))) def isWebSocketRequest(self): return self.env.get("HTTP_UPGRADE") == "websocket" @@ -213,19 +174,21 @@ class UiRequest(object): # Get mime by filename def getContentType(self, file_name): file_name = file_name.lower() - ext = file_name.rsplit(".", 1)[-1] + content_type = mimetypes.guess_type(file_name)[0] - if ext in content_types: - content_type = content_types[ext] - elif ext in ("ttf", "woff", "otf", "woff2", "eot", "sfnt", "collection"): - content_type = "font/%s" % ext - else: - content_type = mimetypes.guess_type(file_name)[0] + if content_type: + content_type = content_type.lower() + if file_name.endswith(".css"): # Force correct css content type + content_type = "text/css" + if file_name.endswith(".js"): # Force correct javascript content type + content_type = "text/javascript" + if file_name.endswith(".json"): # Correct json header + content_type = "application/json" if not content_type: content_type = "application/octet-stream" - return content_type.lower() + return content_type # Return: Posted variables def getPosted(self): @@ -288,11 +251,13 @@ class UiRequest(object): headers["X-Frame-Options"] = "SAMEORIGIN" if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()): headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css + if content_type == "text/javascript" and not self.env.get("HTTP_REFERER"): + headers["Access-Control-Allow-Origin"] = "*" # Allow loading JavaScript modules in Chrome if noscript: - headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';" + headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src 'self'; font-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline';" elif script_nonce and self.isScriptNonceSupported(): - headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self' blob: data:; style-src 'self' blob: 'unsafe-inline'; 
connect-src *; frame-src 'self' blob:".format(script_nonce) + headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self'; style-src 'self' 'unsafe-inline'; connect-src *; frame-src 'self'".format(script_nonce) if allow_ajax: headers["Access-Control-Allow-Origin"] = "null" @@ -302,19 +267,20 @@ class UiRequest(object): headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range" headers["Access-Control-Allow-Credentials"] = "true" + if content_type == "text/html": + content_type = "text/html; charset=utf-8" + if content_type == "text/plain": + content_type = "text/plain; charset=utf-8" + # Download instead of display file types that can be dangerous if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type): headers["Content-Disposition"] = "attachment" cacheable_type = ( - self.env["REQUEST_METHOD"] == "OPTIONS" or - content_type.split("/", 1)[0] in ("image", "video", "font") or - content_type in ("application/javascript", "text/css") + content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or + self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript" ) - if content_type in ("text/plain", "text/html", "text/css", "application/javascript", "application/json", "application/manifest+json"): - content_type += "; charset=utf-8" - if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min headers["Cache-Control"] = "public, max-age=600" # Cache 10 min else: @@ -326,55 +292,19 @@ class UiRequest(object): # Renders a template def render(self, template_path, *args, **kwargs): template = open(template_path, encoding="utf8").read() - - def renderReplacer(m): - if m.group(1) in kwargs: - return "%s" % kwargs.get(m.group(1), "") - else: - return m.group(0) - - template_rendered = re.sub("{(.*?)}", renderReplacer, template) - - return template_rendered.encode("utf8") - - def isWrapperNecessary(self, path): - match = re.match(r"/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) - - if not match: - return True - - inner_path = match.group("inner_path").lstrip("/") - if not inner_path or path.endswith("/"): # It's a directory - content_type = self.getContentType("index.html") - else: # It's a file - content_type = self.getContentType(inner_path) - - is_html_file = "html" in content_type or "xhtml" in content_type - - return is_html_file - - @helper.encodeResponse - def formatRedirect(self, url): - return """ - - - Redirecting to {0} - - - - """.format(html.escape(url)) + for key, val in list(kwargs.items()): + template = template.replace("{%s}" % key, "%s" % val) + return template.encode("utf8") # - Actions - # Redirect to an url def actionRedirect(self, url): self.start_response('301 Redirect', [('Location', str(url))]) - yield self.formatRedirect(url) + yield b"Location changed: " + url.encode("utf8") def actionIndex(self): - return self.actionRedirect("/" + config.homepage + "/") + return self.actionRedirect("/" + config.homepage) # Render a file from media with iframe site wrapper def actionWrapper(self, path, extra_headers=None): @@ -382,13 +312,20 @@ class UiRequest(object): extra_headers = {} script_nonce = self.getScriptNonce() - match = re.match(r"/(?P
<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path) + match = re.match("/(?P<address>
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) just_added = False if match: address = match.group("address") inner_path = match.group("inner_path").lstrip("/") - if not self.isWrapperNecessary(path): + if not inner_path or path.endswith("/"): # It's a directory + content_type = self.getContentType("index.html") + else: # It's a file + content_type = self.getContentType(inner_path) + + is_html_file = "html" in content_type or "xhtml" in content_type + + if not is_html_file: return self.actionSiteMedia("/media" + path) # Serve non-html files without wrapper if self.isAjaxRequest(): @@ -398,7 +335,7 @@ class UiRequest(object): return self.error403("WebSocket request not allowed to load wrapper") # No websocket if "text/html" not in self.env.get("HTTP_ACCEPT", ""): - return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", "")) + return self.error403("Invalid Accept header to load wrapper") if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""): return self.error403("Prefetch not allowed to load wrapper") @@ -439,16 +376,6 @@ class UiRequest(object): else: return "/" + address - def getWsServerUrl(self): - if self.isProxyRequest(): - if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 - server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] - else: # Remote client, use SERVER_NAME as server's real address - server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) - else: - server_url = "" - return server_url - def processQueryString(self, site, query_string): match = re.search("zeronet_peers=(.*?)(&|$)", query_string) if match: @@ -485,9 +412,6 @@ class UiRequest(object): file_url = "/" + address + "/" + inner_path root_url = "/" + address + "/" - if self.isProxyRequest(): - self.server.allowed_ws_origins.add(self.env["HTTP_HOST"]) - # Wrapper variable inits body_style = "" meta_tags = "" @@ -496,23 +420,23 @@ class UiRequest(object): wrapper_nonce = self.getWrapperNonce() inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", "")) - if "?" in inner_path: - sep = "&" - else: - sep = "?" - if inner_query_string: - inner_query_string = "%s%s&wrapper_nonce=%s" % (sep, inner_query_string, wrapper_nonce) + inner_query_string = "?%s&wrapper_nonce=%s" % (inner_query_string, wrapper_nonce) + elif "?" 
in inner_path: + inner_query_string = "&wrapper_nonce=%s" % wrapper_nonce else: - inner_query_string = "%swrapper_nonce=%s" % (sep, wrapper_nonce) + inner_query_string = "?wrapper_nonce=%s" % wrapper_nonce if self.isProxyRequest(): # Its a remote proxy request + if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 + server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] + else: # Remote client, use SERVER_NAME as server's real address + server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) homepage = "http://zero/" + config.homepage else: # Use relative path + server_url = "" homepage = "/" + config.homepage - server_url = self.getWsServerUrl() # Real server url for WS connections - user = self.getCurrentUser() if user: theme = user.settings.get("theme", "light") @@ -540,40 +464,18 @@ class UiRequest(object): if show_loadingscreen is None: show_loadingscreen = not site.storage.isFile(file_inner_path) - - if show_loadingscreen: - meta_tags += ''; - - def xescape(s): - '''combines parts from re.escape & html.escape''' - # https://github.com/python/cpython/blob/3.10/Lib/re.py#L267 - # '&' is handled otherwise - re_chars = {i: '\\' + chr(i) for i in b'()[]{}*+-|^$\\.~# \t\n\r\v\f'} - # https://github.com/python/cpython/blob/3.10/Lib/html/__init__.py#L12 - html_chars = { - '<' : '<', - '>' : '>', - '"' : '"', - "'" : ''', - } - # we can't replace '&' because it makes certain zites work incorrectly - # it should however in no way interfere with re.sub in render - repl = {} - repl.update(re_chars) - repl.update(html_chars) - return s.translate(repl) return self.render( "src/Ui/template/wrapper.html", server_url=server_url, inner_path=inner_path, - file_url=xescape(file_url), - file_inner_path=xescape(file_inner_path), + file_url=re.escape(file_url), + file_inner_path=re.escape(file_inner_path), address=site.address, - title=xescape(title), + title=html.escape(title), body_style=body_style, meta_tags=meta_tags, - query_string=xescape(inner_query_string), + query_string=re.escape(inner_query_string), wrapper_key=site.settings["wrapper_key"], ajax_key=site.settings["ajax_key"], wrapper_nonce=wrapper_nonce, @@ -609,18 +511,8 @@ class UiRequest(object): def isSameOrigin(self, url_a, url_b): if not url_a or not url_b: return False - - url_a = url_a.replace("/raw/", "/") - url_b = url_b.replace("/raw/", "/") - - origin_pattern = "http[s]{0,1}://(.*?/.*?/).*" - is_origin_full = re.match(origin_pattern, url_a) - if not is_origin_full: # Origin looks trimmed to host, require only same host - origin_pattern = "http[s]{0,1}://(.*?/).*" - - origin_a = re.sub(origin_pattern, "\\1", url_a) - origin_b = re.sub(origin_pattern, "\\1", url_b) - + origin_a = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_a) + origin_b = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_b) return origin_a == origin_b # Return {address: 1Site.., inner_path: /data/users.json} from url path @@ -630,14 +522,12 @@ class UiRequest(object): if path.endswith("/"): path = path + "index.html" - if "../" in path or "./" in path: + if ".." in path or "./" in path: raise SecurityError("Invalid path") - match = re.match(r"/media/(?P
<address>[A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path) + match = re.match("/media/(?P<address>
    [A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P/.*|$)", path) if match: path_parts = match.groupdict() - if self.isDomain(path_parts["address"]): - path_parts["address"] = self.resolveDomain(path_parts["address"]) path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites) path_parts["inner_path"] = path_parts["inner_path"].lstrip("/") if not path_parts["inner_path"]: @@ -657,10 +547,9 @@ class UiRequest(object): return self.error404(path) address = path_parts["address"] - file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) - if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."): + if config.debug and file_path.split("/")[-1].startswith("all."): # If debugging merge *.css to all.css and *.js to all.js site = self.server.sites.get(address) if site and site.settings["own"]: @@ -704,7 +593,7 @@ class UiRequest(object): return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) else: self.log.debug("File not found: %s" % path_parts["inner_path"]) - return self.error404(path) + return self.error404(path_parts["inner_path"]) # Serve a media for ui def actionUiMedia(self, path): @@ -712,11 +601,11 @@ class UiRequest(object): if match: # Looks like a valid path file_path = "src/Ui/media/%s" % match.group("inner_path") allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed - if "../" in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): + if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path return self.error403() else: - if (config.debug or config.merge_media) and match.group("inner_path").startswith("all."): + if config.debug and match.group("inner_path").startswith("all."): # If debugging merge *.css to all.css and *.js to all.js from Debug import DebugMedia DebugMedia.merge(file_path) @@ -726,15 +615,13 @@ class UiRequest(object): return self.error400() def actionSiteAdd(self): - post_data = self.env["wsgi.input"].read().decode() - post = dict(urllib.parse.parse_qsl(post_data)) + post = dict(urllib.parse.parse_qsl(self.env["wsgi.input"].read())) if post["add_nonce"] not in self.server.add_nonces: return self.error403("Add nonce error.") self.server.add_nonces.remove(post["add_nonce"]) SiteManager.site_manager.need(post["address"]) return self.actionRedirect(post["url"]) - @helper.encodeResponse def actionSiteAddPrompt(self, path): path_parts = self.parsePath(path) if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]): @@ -749,10 +636,7 @@ class UiRequest(object): def replaceHtmlVariables(self, block, path_parts): user = self.getCurrentUser() - if user and user.settings: - themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light")) - else: - themeclass = "theme-light" + themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light")) block = block.replace(b"{themeclass}", themeclass.encode("utf8")) if path_parts: @@ -766,7 +650,7 @@ class UiRequest(object): return block # Stream a file to client - def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, extra_headers={}, file_size=None, file_obj=None, path_parts=None): + def actionFile(self, file_path, block_size=64 * 1024, send_header=True, 
header_length=True, header_noscript=False, header_allow_ajax=False, file_size=None, file_obj=None, path_parts=None): file_name = os.path.basename(file_path) if file_size is None: @@ -784,10 +668,7 @@ class UiRequest(object): header_length = False if send_header: - extra_headers = extra_headers.copy() - content_encoding = self.get.get("zeronet_content_encoding", "") - if all(part.strip() in ("gzip", "compress", "deflate", "identity", "br") for part in content_encoding.split(",")): - extra_headers["Content-Encoding"] = content_encoding + extra_headers = {} extra_headers["Accept-Ranges"] = "bytes" if header_length: extra_headers["Content-Length"] = str(file_size) @@ -829,21 +710,9 @@ class UiRequest(object): # On websocket connection def actionWebsocket(self): ws = self.env.get("wsgi.websocket") - if ws: - # Allow only same-origin websocket requests - origin = self.env.get("HTTP_ORIGIN") - host = self.env.get("HTTP_HOST") - # Allow only same-origin websocket requests - if origin: - origin_host = origin.split("://", 1)[-1] - if origin_host != host and origin_host not in self.server.allowed_ws_origins: - error_message = "Invalid origin: %s (host: %s, allowed: %s)" % (origin, host, self.server.allowed_ws_origins) - ws.send(json.dumps({"error": error_message})) - return self.error403(error_message) - - # Find site by wrapper_key wrapper_key = self.get["wrapper_key"] + # Find site by wrapper_key site = None for site_check in list(self.server.sites.values()): if site_check.settings["wrapper_key"] == wrapper_key: @@ -867,7 +736,7 @@ class UiRequest(object): # Remove websocket from every site (admin sites allowed to join other sites event channels) if ui_websocket in site_check.websockets: site_check.websockets.remove(ui_websocket) - return [b"Bye."] + return "Bye." else: # No site found by wrapper key ws.send(json.dumps({"error": "Wrapper key not found: %s" % wrapper_key})) return self.error403("Wrapper key not found: %s" % wrapper_key) @@ -925,7 +794,7 @@ class UiRequest(object): # You are not allowed to access this def error403(self, message="", details=True): self.sendHeader(403, noscript=True) - self.log.warning("Error 403: %s" % message) + self.log.error("Error 403: %s" % message) return self.formatError("Forbidden", message, details=details) # Send file not found error @@ -965,10 +834,6 @@ class UiRequest(object): """ % (title, html.escape(message), html.escape(json.dumps(details, indent=4, sort_keys=True))) else: return """ -

    %s

    %s

    """ % (title, html.escape(message)) diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py index 61943ada..9daa90b1 100644 --- a/src/Ui/UiServer.py +++ b/src/Ui/UiServer.py @@ -1,11 +1,13 @@ import logging import time -import urllib +import cgi import socket +import sys import gevent from gevent.pywsgi import WSGIServer -from lib.gevent_ws import WebSocketHandler +from gevent.pywsgi import WSGIHandler +from geventwebsocket.handler import WebSocketHandler from .UiRequest import UiRequest from Site import SiteManager @@ -15,7 +17,7 @@ import importlib # Skip websocket handler if not necessary -class UiWSGIHandler(WebSocketHandler): +class UiWSGIHandler(WSGIHandler): def __init__(self, *args, **kwargs): self.server = args[2] @@ -23,25 +25,25 @@ class UiWSGIHandler(WebSocketHandler): self.args = args self.kwargs = kwargs - def handleError(self, err): - if config.debug: # Allow websocket errors to appear on /Debug - import main - main.DebugHook.handleError() - else: - ui_request = UiRequest(self.server, {}, self.environ, self.start_response) - block_gen = ui_request.error500("UiWSGIHandler error: %s" % Debug.formatExceptionMessage(err)) - for block in block_gen: - self.write(block) - def run_application(self): - err_name = "UiWSGIHandler websocket" if "HTTP_UPGRADE" in self.environ else "UiWSGIHandler" - try: - super(UiWSGIHandler, self).run_application() - except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as err: - logging.warning("%s connection error: %s" % (err_name, err)) - except Exception as err: - logging.warning("%s error: %s" % (err_name, Debug.formatException(err))) - self.handleError(err) + if "HTTP_UPGRADE" in self.environ: # Websocket request + try: + ws_handler = WebSocketHandler(*self.args, **self.kwargs) + ws_handler.__dict__ = self.__dict__ # Match class variables + ws_handler.run_application() + except Exception as err: + logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err)) + if config.debug: # Allow websocket errors to appear on /Debug + import main + main.DebugHook.handleError() + else: # Standard HTTP request + try: + super(UiWSGIHandler, self).run_application() + except Exception as err: + logging.error("UiWSGIHandler error: %s" % Debug.formatException(err)) + if config.debug: # Allow websocket errors to appear on /Debug + import main + main.DebugHook.handleError() def handle(self): # Save socket to be able to close them properly on exit @@ -51,6 +53,7 @@ class UiWSGIHandler(WebSocketHandler): class UiServer: + def __init__(self): self.ip = config.ui_ip self.port = config.ui_port @@ -59,7 +62,6 @@ class UiServer: self.ip = "0.0.0.0" # Bind all if config.ui_host: self.allowed_hosts = set(config.ui_host) - #TODO: For proxies allow sub domains(www) as valid hosts, should be user preference. elif config.ui_ip == "127.0.0.1": # IP Addresses are inherently allowed as they are immune to DNS # rebinding attacks. 
@@ -74,7 +76,6 @@ class UiServer: self.allowed_hosts.update(["localhost"]) else: self.allowed_hosts = set([]) - self.allowed_ws_origins = set() self.allow_trans_proxy = config.ui_trans_proxy self.wrapper_nonces = [] @@ -83,10 +84,6 @@ class UiServer: self.site_manager = SiteManager.site_manager self.sites = SiteManager.site_manager.list() self.log = logging.getLogger(__name__) - config.error_logger.onNewRecord = self.handleErrorLogRecord - - def handleErrorLogRecord(self, record): - self.updateWebsocket(log_event=record.levelname) # After WebUI started def afterStarted(self): @@ -97,7 +94,7 @@ class UiServer: def handleRequest(self, env, start_response): path = bytes(env["PATH_INFO"], "raw-unicode-escape").decode("utf8") if env.get("QUERY_STRING"): - get = dict(urllib.parse.parse_qsl(env['QUERY_STRING'])) + get = dict(cgi.parse_qsl(env['QUERY_STRING'])) else: get = {} ui_request = UiRequest(self, get, env, start_response) @@ -198,10 +195,5 @@ class UiServer: time.sleep(1) def updateWebsocket(self, **kwargs): - if kwargs: - param = {"event": list(kwargs.items())[0]} - else: - param = None - for ws in self.websockets: - ws.event("serverChanged", param) + ws.event("serverChanged", kwargs) \ No newline at end of file diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 2f982e1d..e1df4366 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -6,7 +6,6 @@ import shutil import re import copy import logging -import stat import gevent @@ -19,12 +18,18 @@ from Plugin import PluginManager from Translate import translate as _ from util import helper from util import SafeRe -from util.Flag import flag from Content.ContentManager import VerifyError, SignError @PluginManager.acceptPlugins class UiWebsocket(object): + admin_commands = set([ + "sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteAdd", "siteListModifiedFiles", "siteSetSettingsValue", + "channelJoinAllsite", "serverUpdate", "serverPortcheck", "serverShutdown", "serverShowdirectory", "serverGetWrapperNonce", + "certSet", "certList", "configSet", "permissionAdd", "permissionRemove", "announcerStats", "userSetGlobalSettings" + ]) + async_commands = set(["fileGet", "fileList", "dirList", "fileNeed", "serverPortcheck", "siteListModifiedFiles"]) + def __init__(self, ws, site, server, user, request): self.ws = ws self.site = site @@ -86,11 +91,6 @@ class UiWebsocket(object): if not self.hasPlugin("Multiuser"): self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) - self.onClosed() - - def onClosed(self): - pass - def dedent(self, text): return re.sub("[\\r\\n\\x20\\t]+", " ", text.strip().replace("
    ", " ")) @@ -107,13 +107,59 @@ class UiWebsocket(object): "Please check your configuration.") ]) + import main + file_server = main.file_server + if any(file_server.port_opened.values()): + self.site.notifications.append([ + "done", + _["Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!"].format(config.fileserver_port), + 10000 + ]) + elif config.tor == "always" and file_server.tor_manager.start_onions: + self.site.notifications.append([ + "done", + _(""" + {_[Tor mode active, every connection using Onion route.]}
    + {_[Successfully started Tor onion hidden services.]} + """), + 10000 + ]) + elif config.tor == "always" and file_server.tor_manager.start_onions is not False: + self.site.notifications.append([ + "error", + _(""" + {_[Tor mode active, every connection using Onion route.]}
    + {_[Unable to start hidden services, please check your config.]} + """), + 0 + ]) + elif file_server.tor_manager.start_onions: + self.site.notifications.append([ + "done", + _(""" + {_[Successfully started Tor onion hidden services.]}
    + {_[For faster connections open {0} port on your router.]} + """).format(config.fileserver_port), + 10000 + ]) + else: + self.site.notifications.append([ + "error", + _(""" + {_[Your connection is restricted. Please, open {0} port on your router]}
    + {_[or configure Tor to become a full member of the ZeroNet network.]} + """).format(config.fileserver_port), + 0 + ]) + def hasPlugin(self, name): return name in PluginManager.plugin_manager.plugin_names # Has permission to run the command def hasCmdPermission(self, cmd): - flags = flag.db.get(self.getCmdFuncName(cmd), ()) - if "admin" in flags and "ADMIN" not in self.permissions: + cmd = cmd[0].lower() + cmd[1:] + + if cmd in self.admin_commands and "ADMIN" not in self.permissions: return False else: return True @@ -140,8 +186,6 @@ class UiWebsocket(object): self.cmd("setSiteInfo", site_info) elif channel == "serverChanged": server_info = self.formatServerInfo() - if len(params) > 0 and params[0]: # Extra data - server_info.update(params[0]) self.cmd("setServerInfo", server_info) elif channel == "announcerChanged": site = params[0] @@ -201,10 +245,6 @@ class UiWebsocket(object): gevent.spawn(asyncErrorWatcher, func, *args, **kwargs) return wrapper - def getCmdFuncName(self, cmd): - func_name = "action" + cmd[0].upper() + cmd[1:] - return func_name - # Handle incoming messages def handleRequest(self, req): @@ -214,21 +254,17 @@ class UiWebsocket(object): if cmd == "response": # It's a response to a command return self.actionResponse(req["to"], req["result"]) + elif not self.hasCmdPermission(cmd): # Admin commands + return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) else: # Normal command - func_name = self.getCmdFuncName(cmd) + func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) - if self.site.settings.get("deleting"): - return self.response(req["id"], {"error": "Site is deleting"}) - if not func: # Unknown command - return self.response(req["id"], {"error": "Unknown command: %s" % cmd}) - - if not self.hasCmdPermission(cmd): # Admin commands - return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) + self.response(req["id"], {"error": "Unknown command: %s" % cmd}) + return # Execute in parallel - func_flags = flag.db.get(self.getCmdFuncName(cmd), ()) - if func_flags and "async_run" in func_flags: + if cmd in self.async_commands: func = self.asyncWrapper(func) # Support calling as named, unnamed parameters and raw first argument too @@ -261,13 +297,13 @@ class UiWebsocket(object): settings = site.settings.copy() del settings["wrapper_key"] # Dont expose wrapper key + del settings["auth_key"] # Dont send auth key twice ret = { + "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed "auth_address": self.user.getAuthAddress(site.address, create=create_user), "cert_user_id": self.user.getCertUserId(site.address), "address": site.address, - "address_short": site.address_short, - "address_hash": site.address_hash.hex(), "settings": settings, "content_updated": site.content_updated, "bad_files": len(site.bad_files), @@ -311,7 +347,6 @@ class UiWebsocket(object): "debug": config.debug, "offline": config.offline, "plugins": PluginManager.plugin_manager.plugin_names, - "plugins_rev": PluginManager.plugin_manager.plugins_rev, "user_settings": self.user.settings } if "ADMIN" in self.site.settings["permissions"]: @@ -327,10 +362,7 @@ class UiWebsocket(object): def actionAs(self, to, address, cmd, params=[]): if not self.hasSitePermission(address, cmd=cmd): - #TODO! Return this as error ? 
return self.response(to, "No permission for site %s" % address) - if not self.server.sites.get(address): - return self.response(to, {"error": "Site Does Not Exist: %s" % address}) req_self = copy.copy(self) req_self.site = self.server.sites.get(address) req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site @@ -369,15 +401,12 @@ class UiWebsocket(object): if channel not in self.channels: self.channels.append(channel) - self.response(to, "ok") - # Server variables def actionServerInfo(self, to): back = self.formatServerInfo() self.response(to, back) # Create a new wrapper nonce that allows to load html file - @flag.admin def actionServerGetWrapperNonce(self, to): wrapper_nonce = self.request.getWrapperNonce() self.response(to, wrapper_nonce) @@ -386,7 +415,6 @@ class UiWebsocket(object): back = self.formatAnnouncerInfo(self.site) self.response(to, back) - @flag.admin def actionAnnouncerStats(self, to): back = {} trackers = self.site.announcer.getTrackers() @@ -422,15 +450,10 @@ class UiWebsocket(object): is_user_content = file_info and ("cert_signers" in file_info or "cert_signers_pattern" in file_info) if is_user_content and privatekey is None: cert = self.user.getCert(self.site.address) - if not cert: - error = "Site sign failed: No certificate selected for Site: %s, Hence Signing inner_path: %s Failed, Try Adding/Selecting User Cert via Site Login" % (self.site.address, inner_path) - self.log.error(error) - return self.response(to, {"error": error}) - else: - extend["cert_auth_type"] = cert["auth_type"] - extend["cert_user_id"] = self.user.getCertUserId(site.address) - extend["cert_sign"] = cert["cert_sign"] - self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"]) + extend["cert_auth_type"] = cert["auth_type"] + extend["cert_user_id"] = self.user.getCertUserId(site.address) + extend["cert_sign"] = cert["cert_sign"] + self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"]) if not self.hasFilePermission(inner_path): self.log.error("SiteSign error: you don't own this site & site owner doesn't allow you to do so.") @@ -518,7 +541,7 @@ class UiWebsocket(object): progress ]) diffs = site.content_manager.getDiffs(inner_path) - back = site.publish(limit=10, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) + back = site.publish(limit=5, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) if back == 0: # Failed to publish to anyone self.cmd("progress", ["publish", _["Content publish failed."], -100]) else: @@ -636,7 +659,7 @@ class UiWebsocket(object): self.site.storage.delete(inner_path) except Exception as err: self.log.error("File delete error: %s" % err) - return self.response(to, {"error": "Delete error: %s" % Debug.formatExceptionMessage(err)}) + return self.response(to, {"error": "Delete error: %s" % err}) self.response(to, "ok") @@ -654,7 +677,6 @@ class UiWebsocket(object): return self.response(to, rows) # List files in directory - @flag.async_run def actionFileList(self, to, inner_path): try: return list(self.site.storage.walk(inner_path)) @@ -663,20 +685,9 @@ class UiWebsocket(object): return {"error": Debug.formatExceptionMessage(err)} # List directories in a directory - @flag.async_run - def actionDirList(self, to, inner_path, stats=False): + def actionDirList(self, to, inner_path): try: - if stats: - back = [] - for file_name in self.site.storage.list(inner_path): - file_stats = os.stat(self.site.storage.getPath(inner_path + "/" + file_name)) - is_dir = 
stat.S_ISDIR(file_stats.st_mode) - back.append( - {"name": file_name, "size": file_stats.st_size, "is_dir": is_dir} - ) - return back - else: - return list(self.site.storage.list(inner_path)) + return list(self.site.storage.list(inner_path)) except Exception as err: self.log.error("dirList %s error: %s" % (inner_path, Debug.formatException(err))) return {"error": Debug.formatExceptionMessage(err)} @@ -689,7 +700,7 @@ class UiWebsocket(object): try: res = self.site.storage.query(query, params) except Exception as err: # Response the error to client - self.log.error("DbQuery error: %s" % Debug.formatException(err)) + self.log.error("DbQuery error: %s" % err) return self.response(to, {"error": Debug.formatExceptionMessage(err)}) # Convert result to dict for row in res: @@ -699,15 +710,14 @@ class UiWebsocket(object): return self.response(to, rows) # Return file content - @flag.async_run - def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300, priority=6): + def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300): try: if required or inner_path in self.site.bad_files: with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=priority) + self.site.needFile(inner_path, priority=6) body = self.site.storage.read(inner_path, "rb") except (Exception, gevent.Timeout) as err: - self.log.debug("%s fileGet error: %s" % (inner_path, Debug.formatException(err))) + self.log.error("%s fileGet error: %s" % (inner_path, Debug.formatException(err))) body = None if not body: @@ -716,18 +726,14 @@ class UiWebsocket(object): import base64 body = base64.b64encode(body).decode() else: - try: - body = body.decode() - except Exception as err: - self.response(to, {"error": "Error decoding text: %s" % err}) + body = body.decode() self.response(to, body) - @flag.async_run - def actionFileNeed(self, to, inner_path, timeout=300, priority=6): + def actionFileNeed(self, to, inner_path, timeout=300): try: with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=priority) - except (Exception, gevent.Timeout) as err: + self.site.needFile(inner_path, priority=6) + except Exception as err: return self.response(to, {"error": Debug.formatExceptionMessage(err)}) return self.response(to, "ok") @@ -849,7 +855,6 @@ class UiWebsocket(object): # - Admin actions - - @flag.admin def actionPermissionAdd(self, to, permission): if permission not in self.site.settings["permissions"]: self.site.settings["permissions"].append(permission) @@ -857,14 +862,12 @@ class UiWebsocket(object): self.site.updateWebsocket(permission_added=permission) self.response(to, "ok") - @flag.admin def actionPermissionRemove(self, to, permission): self.site.settings["permissions"].remove(permission) self.site.saveSettings() self.site.updateWebsocket(permission_removed=permission) self.response(to, "ok") - @flag.admin def actionPermissionDetails(self, to, permission): if permission == "ADMIN": self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") @@ -876,14 +879,12 @@ class UiWebsocket(object): self.response(to, "") # Set certificate that used for authenticate user for site - @flag.admin def actionCertSet(self, to, domain): self.user.setCert(self.site.address, domain) self.site.updateWebsocket(cert_changed=domain) self.response(to, "ok") # List user's certificates - @flag.admin def actionCertList(self, to): back = [] auth_address = self.user.getAuthAddress(self.site.address) @@ -898,9 +899,9 @@ class UiWebsocket(object): return 
back # List all site info - @flag.admin def actionSiteList(self, to, connecting_sites=False): ret = [] + SiteManager.site_manager.load() # Reload sites for site in list(self.server.sites.values()): if not site.content_manager.contents.get("content.json") and not connecting_sites: continue # Incomplete site @@ -908,7 +909,6 @@ class UiWebsocket(object): self.response(to, ret) # Join to an event channel on all sites - @flag.admin def actionChannelJoinAllsite(self, to, channel): if channel not in self.channels: # Add channel to channels self.channels.append(channel) @@ -917,8 +917,6 @@ class UiWebsocket(object): if self not in site.websockets: site.websockets.append(self) - self.response(to, "ok") - # Update site content.json def actionSiteUpdate(self, to, address, check_files=False, since=None, announce=False): def updateThread(): @@ -936,7 +934,6 @@ class UiWebsocket(object): self.response(to, {"error": "Unknown site: %s" % address}) # Pause site serving - @flag.admin def actionSitePause(self, to, address): site = self.server.sites.get(address) if site: @@ -949,7 +946,6 @@ class UiWebsocket(object): self.response(to, {"error": "Unknown site: %s" % address}) # Resume site serving - @flag.admin def actionSiteResume(self, to, address): site = self.server.sites.get(address) if site: @@ -962,8 +958,6 @@ class UiWebsocket(object): else: self.response(to, {"error": "Unknown site: %s" % address}) - @flag.admin - @flag.no_multiuser def actionSiteDelete(self, to, address): site = self.server.sites.get(address) if site: @@ -993,14 +987,13 @@ class UiWebsocket(object): new_site.settings["own"] = True new_site.saveSettings() self.cmd("notification", ["done", _["Site cloned"]]) - if redirect: + if redirect : self.cmd("redirect", "/%s" % new_address) gevent.spawn(new_site.announce) response = {"address": new_address} self.response(to, response) return "ok" - @flag.no_multiuser def actionSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): if not SiteManager.site_manager.isAddress(address): self.response(to, {"error": "Not a site: %s" % address}) @@ -1027,8 +1020,6 @@ class UiWebsocket(object): lambda res: self.cbSiteClone(to, address, root_inner_path, target_address, redirect) ) - @flag.admin - @flag.no_multiuser def actionSiteSetLimit(self, to, size_limit): self.site.settings["size_limit"] = int(size_limit) self.site.saveSettings() @@ -1036,7 +1027,6 @@ class UiWebsocket(object): self.site.updateWebsocket() self.site.download(blind_includes=True) - @flag.admin def actionSiteAdd(self, to, address): site_manager = SiteManager.site_manager if address in site_manager.sites: @@ -1047,12 +1037,8 @@ class UiWebsocket(object): else: return {"error": "Invalid address"} - @flag.async_run def actionSiteListModifiedFiles(self, to, content_inner_path="content.json"): - content = self.site.content_manager.contents.get(content_inner_path) - if not content: - return {"error": "content file not avaliable"} - + content = self.site.content_manager.contents[content_inner_path] min_mtime = content.get("modified", 0) site_path = self.site.storage.directory modified_files = [] @@ -1064,9 +1050,6 @@ class UiWebsocket(object): inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys()) - if len(inner_paths) > 100: - return {"error": "Too many files in content.json"} - for relative_inner_path in inner_paths: inner_path = helper.getDirname(content_inner_path) + relative_inner_path try: @@ -1104,7 +1087,7 @@ class UiWebsocket(object): 
self.site.settings["cache"]["modified_files"] = modified_files return {"modified_files": modified_files} - @flag.admin + def actionSiteSetSettingsValue(self, to, key, value): if key not in ["modified_files_notification"]: return {"error": "Can't change this key"} @@ -1125,19 +1108,11 @@ class UiWebsocket(object): settings = self.user.settings self.response(to, settings) - @flag.admin def actionUserSetGlobalSettings(self, to, settings): self.user.settings = settings self.user.save() self.response(to, "ok") - @flag.admin - @flag.no_multiuser - def actionServerErrors(self, to): - return config.error_logger.lines - - @flag.admin - @flag.no_multiuser def actionServerUpdate(self, to): def cbServerUpdate(res): self.response(to, res) @@ -1152,7 +1127,6 @@ class UiWebsocket(object): import main main.update_after_shutdown = True - main.restart_after_shutdown = True SiteManager.site_manager.save() main.file_server.stop() main.ui_server.stop() @@ -1163,36 +1137,19 @@ class UiWebsocket(object): cbServerUpdate ) - @flag.admin - @flag.async_run - @flag.no_multiuser def actionServerPortcheck(self, to): import main file_server = main.file_server file_server.portCheck() self.response(to, file_server.port_opened) - @flag.admin - @flag.no_multiuser def actionServerShutdown(self, to, restart=False): import main - def cbServerShutdown(res): - self.response(to, res) - if not res: - return False - if restart: - main.restart_after_shutdown = True - main.file_server.stop() - main.ui_server.stop() - if restart: - message = [_["Restart ZeroNet client?"], _["Restart"]] - else: - message = [_["Shut down ZeroNet client?"], _["Shut down"]] - self.cmd("confirm", message, cbServerShutdown) + main.restart_after_shutdown = True + main.file_server.stop() + main.ui_server.stop() - @flag.admin - @flag.no_multiuser def actionServerShowdirectory(self, to, directory="backup", inner_path=""): if self.request.env["REMOTE_ADDR"] != "127.0.0.1": return self.response(to, {"error": "Only clients from 127.0.0.1 allowed to run this command"}) @@ -1212,21 +1169,12 @@ class UiWebsocket(object): else: return self.response(to, {"error": "Not a directory"}) - @flag.admin - @flag.no_multiuser def actionConfigSet(self, to, key, value): import main - - self.log.debug("Changing config %s value to %r" % (key, value)) if key not in config.keys_api_change_allowed: - self.response(to, {"error": "Forbidden: You cannot set this config key"}) + self.response(to, {"error": "Forbidden you cannot set this config key"}) return - if key == "open_browser": - if value not in ["default_browser", "False"]: - self.response(to, {"error": "Forbidden: Invalid value"}) - return - # Remove empty lines from lists if type(value) is list: value = [line for line in value if line] diff --git a/src/Ui/media/Infopanel.coffee b/src/Ui/media/Infopanel.coffee index 3a490364..eb17eae7 100644 --- a/src/Ui/media/Infopanel.coffee +++ b/src/Ui/media/Infopanel.coffee @@ -3,22 +3,15 @@ class Infopanel @visible = false show: (closed=false) => - @elem.parent().addClass("visible") + @elem.addClass("visible") if closed @close() else @open() - unfold: => - @elem.toggleClass("unfolded") - return false - updateEvents: => @elem.off("click") @elem.find(".close").off("click") - @elem.find(".line").off("click") - - @elem.find(".line").on("click", @unfold) if @elem.hasClass("closed") @elem.on "click", => @@ -30,7 +23,7 @@ class Infopanel @close() hide: => - @elem.parent().removeClass("visible") + @elem.removeClass("visible") close: => @elem.addClass("closed") diff --git 
a/src/Ui/media/Loading.coffee b/src/Ui/media/Loading.coffee index 8e35ce66..7cd2479d 100644 --- a/src/Ui/media/Loading.coffee +++ b/src/Ui/media/Loading.coffee @@ -2,18 +2,15 @@ class Loading constructor: (@wrapper) -> if window.show_loadingscreen then @showScreen() @timer_hide = null - @timer_set = null setProgress: (percent) -> if @timer_hide clearInterval @timer_hide - @timer_set = RateLimit 500, -> + RateLimit 200, -> $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block") hideProgress: -> - @log "hideProgress" - if @timer_set - clearInterval @timer_set + console.log "hideProgress" @timer_hide = setTimeout ( => $(".progressbar").css("transform": "scaleX(1)").css("opacity", "0").hideLater(1000) ), 300 @@ -26,7 +23,6 @@ class Loading showTooLarge: (site_info) -> - @log "Displaying large site confirmation" if $(".console .button-setlimit").length == 0 # Not displaying it yet line = @printLine("Site size: #{parseInt(site_info.settings.size/1024/1024)}MB is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning") button = $("" + "Open site and set size limit to #{site_info.next_size_limit}MB" + "") @@ -56,7 +52,7 @@ class Loading # We dont need loadingscreen anymore hideScreen: -> - @log "hideScreen" + console.log "hideScreen" if not $(".loadingscreen").hasClass("done") # Only if its not animating already if @screen_visible # Hide with animate $(".loadingscreen").addClass("done").removeLater(2000) @@ -84,8 +80,6 @@ class Loading if type == "warning" then line.addClass("console-warning") return line - log: (args...) -> - console.log "[Loading]", args... window.Loading = Loading diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee index 35d949f3..b31067fb 100644 --- a/src/Ui/media/Notifications.coffee +++ b/src/Ui/media/Notifications.coffee @@ -51,13 +51,13 @@ class Notifications ), timeout # Animate - width = Math.min(elem.outerWidth() + 70, 580) + width = elem.outerWidth() if not timeout then width += 20 # Add space for close button if elem.outerHeight() > 55 then elem.addClass("long") elem.css({"width": "50px", "transform": "scale(0.01)"}) elem.animate({"scale": 1}, 800, "easeOutElastic") elem.animate({"width": width}, 700, "easeInOutCubic") - $(".body", elem).css("width": (width - 50)) + $(".body", elem).css("width": (width - 80)) $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000) # Close button or Confirm button diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee index 1b98855e..bdaa2c0c 100644 --- a/src/Ui/media/Wrapper.coffee +++ b/src/Ui/media/Wrapper.coffee @@ -34,7 +34,6 @@ class Wrapper @opener_tested = false @announcer_line = null @web_notifications = {} - @is_title_changed = false @allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors @@ -64,9 +63,6 @@ class Wrapper # Incoming message from UiServer websocket onMessageWebsocket: (e) => message = JSON.parse(e.data) - @handleMessageWebsocket(message) - - handleMessageWebsocket: (message) => cmd = message.cmd if cmd == "response" if @ws.waiting_cb[message.to]? 
# We are waiting for response @@ -172,9 +168,7 @@ class Wrapper else if cmd == "wrapperSetViewport" # Set the viewport @actionSetViewport(message) else if cmd == "wrapperSetTitle" - @log "wrapperSetTitle", message.params $("head title").text(message.params) - @is_title_changed = true else if cmd == "wrapperReload" # Reload current page @actionReload(message) else if cmd == "wrapperGetLocalStorage" @@ -420,13 +414,11 @@ class Wrapper @reload(message.params[0]) reload: (url_post="") -> - @log "Reload" - current_url = window.location.toString().replace(/#.*/g, "") if url_post - if current_url.indexOf("?") > 0 - window.location = current_url + "&" + url_post + if window.location.toString().indexOf("?") > 0 + window.location += "&"+url_post else - window.location = current_url + "?" + url_post + window.location += "?"+url_post else window.location.reload() @@ -496,14 +488,13 @@ class Wrapper # Iframe loaded onPageLoad: (e) => - @log "onPageLoad" @inner_loaded = true if not @inner_ready then @sendInner {"cmd": "wrapperReady"} # Inner frame loaded before wrapper #if not @site_error then @loading.hideScreen() # Hide loading screen if @ws.ws.readyState == 1 and not @site_info # Ws opened @reloadSiteInfo() - else if @site_info and @site_info.content?.title? and not @is_title_changed - window.document.title = @site_info.content.title + " - ZeroNet" + else if @site_info and @site_info.content?.title? + window.document.title = @site_info.content.title+" - ZeroNet" @log "Setting title to", window.document.title onWrapperLoad: => @@ -531,13 +522,16 @@ class Wrapper @address = site_info.address @setSiteInfo site_info - if site_info.settings.size > site_info.size_limit * 1024 * 1024 and not @loading.screen_visible # Site size too large and not displaying it yet - @displayConfirm "Site is larger than allowed: #{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB", "Set limit to #{site_info.next_size_limit}MB", => - @ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) => - if res == "ok" - @notifications.add("size_limit", "done", "Site storage limit modified!", 5000) + if site_info.settings.size > site_info.size_limit*1024*1024 # Site size too large and not displaying it yet + if @loading.screen_visible + @loading.showTooLarge(site_info) + else + @displayConfirm "Site is larger than allowed: #{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB", "Set limit to #{site_info.next_size_limit}MB", => + @ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) => + if res == "ok" + @notifications.add("size_limit", "done", "Site storage limit modified!", 5000) - if site_info.content?.title? and not @is_title_changed + if site_info.content?.title? window.document.title = site_info.content.title + " - ZeroNet" @log "Setting title to", window.document.title @@ -554,9 +548,9 @@ class Wrapper if site_info.event[1] == window.file_inner_path # File downloaded we currently on @loading.hideScreen() if not @site_info then @reloadSiteInfo() - if site_info.content and not @is_title_changed - window.document.title = site_info.content.title + " - ZeroNet" - @log "Required file #{window.file_inner_path} done, setting title to", window.document.title + if site_info.content + window.document.title = site_info.content.title+" - ZeroNet" + @log "Required file done, setting title to", window.document.title if not window.show_loadingscreen @notifications.add("modified", "info", "New version of this page has just released.
    Reload to see the modified content.") # File failed downloading @@ -586,17 +580,12 @@ class Wrapper @notifications.add("size_limit", "done", "Site storage limit modified!", 5000) return false - if @loading.screen_visible and @inner_loaded and site_info.settings.size < site_info.size_limit * 1024 * 1024 and site_info.settings.size > 0 # Loading screen still visible, but inner loaded - @log "Loading screen visible, but inner loaded" + if @loading.screen_visible and @inner_loaded and site_info.settings.size < site_info.size_limit*1024*1024 and site_info.settings.size > 0 # Loading screen still visible, but inner loaded @loading.hideScreen() if site_info?.settings?.own and site_info?.settings?.modified != @site_info?.settings?.modified @updateModifiedPanel() - if @loading.screen_visible and site_info.settings.size > site_info.size_limit * 1024 * 1024 - @log "Site too large" - @loading.showTooLarge(site_info) - @site_info = site_info @event_site_info.resolve() @@ -626,7 +615,7 @@ class Wrapper updateModifiedPanel: => @ws.cmd "siteListModifiedFiles", [], (res) => - num = res.modified_files?.length + num = res.modified_files.length if num > 0 closed = @site_info.settings.modified_files_notification == false @infopanel.show(closed) @@ -645,7 +634,8 @@ class Wrapper @notifications.add "sign", "done", "content.json Signed!", 5000 @sitePublish("content.json") return false - @log "siteListModifiedFiles", num, res + + @log "siteListModifiedFiles", res setAnnouncerInfo: (announcer_info) -> status_db = {announcing: [], error: [], announced: []} @@ -658,7 +648,7 @@ class Wrapper else @announcer_line = @loading.printLine(status_line) - if status_db.error.length > (status_db.announced.length + status_db.announcing.length) and status_db.announced.length < 3 + if status_db.error.length > (status_db.announced.length + status_db.announcing.length) @loading.showTrackerTorBridge(@server_info) updateProgress: (site_info) -> @@ -681,13 +671,11 @@ class Wrapper setSizeLimit: (size_limit, reload=true) => - @log "setSizeLimit: #{size_limit}, reload: #{reload}" - @inner_loaded = false # Inner frame not loaded, just a 404 page displayed @ws.cmd "siteSetLimit", [size_limit], (res) => if res != "ok" return false @loading.printLine res - @inner_loaded = false + @inner_loaded = false # Inner frame not loaded, just a 404 page displayed if reload then @reloadIframe() return false diff --git a/src/Ui/media/Wrapper.css b/src/Ui/media/Wrapper.css index 67e35a84..4b90bcfb 100644 --- a/src/Ui/media/Wrapper.css +++ b/src/Ui/media/Wrapper.css @@ -8,10 +8,7 @@ a { color: black } #inner-iframe { width: 100%; height: 100%; position: absolute; border: 0; } /*; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out*/ #inner-iframe.back { transform: scale(0.95) translate(-300px, 0); opacity: 0.4 } -.button { - padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; - border-radius: 2px; text-decoration: none; transition: all 0.5s; background-position: left center; white-space: nowrap; -} +.button { padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; border-radius: 2px; text-decoration: none; transition: all 0.5s; background-position: left center; } .button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; transition: none } .button:active { position: relative; top: 1px } .button:focus { outline: none } @@ -47,31 +44,27 @@ a { color: black } .notifications { position: absolute; top: 0; right: 80px; 
display: inline-block; z-index: 999; white-space: nowrap } .notification { - position: relative; float: right; clear: both; margin: 10px; box-sizing: border-box; overflow: hidden; backface-visibility: hidden; - perspective: 1000px; padding-bottom: 5px; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; - font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/ + position: relative; float: right; clear: both; margin: 10px; box-sizing: border-box; overflow: hidden; backface-visibility: hidden; perspective: 1000px; padding-bottom: 5px; + color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/ } .notification-icon { display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 2; text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white; } .notification .body { - border-right: 40px solid transparent; padding-left: 14px; padding-right: 60px; height: 50px; vertical-align: middle; display: table; padding-right: 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; + padding-left: 14px; padding-right: 60px; height: 40px; vertical-align: middle; display: table; background-color: white; left: 50px; top: 0; position: relative; padding-top: 5px; padding-bottom: 5px; } .notification .message-outer { display: table-row } -.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; padding-right: 20px; text-align: right; } +.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; } .notification.long .body { padding-top: 10px; padding-bottom: 10px } -.notification .message { display: table-cell; vertical-align: middle; max-width: 500px; white-space: normal; } +.notification .message { display: table-cell; vertical-align: middle; } .notification.visible { max-width: 350px } -.notification .close:hover { opacity: 0.8 } -.notification .close { - position: absolute; top: 0; right: 0; text-decoration: none; margin: 10px; padding: 0px; display: block; width: 30px; height: 30px; - text-align: center; background-color: tomato; line-height: 30px; vertical-align: bottom; font-size: 30px; color: white; -} - +.notification .close { position: absolute; top: 0; right: 0; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none } +.notification .close:hover { color: black } +.notification .close:active, .notification .close:focus { color: #AF3BFF } .notification small { color: #AAA } .notification .multiline { white-space: normal; word-break: break-word; max-width: 300px; } .body-white .notification { box-shadow: 0 1px 9px rgba(0,0,0,0.1) } @@ -79,7 +72,7 @@ a { color: black } /* Notification select */ .notification .select { display: block; padding: 10px; margin-right: -32px; text-decoration: none; border-left: 3px solid #EEE; - margin-top: 10px; transition: all 0.3s; color: #666 + margin-top: 1px; transition: all 0.3s; color: #666 } .notification .select:hover, .notification .select.active { background-color: #007AFF; border-left: 3px solid #5D68FF; color: white; transition: none } .notification .select:active, .notification .select:focus { background-color: #3396FF; color: white; transition: none; border-left-color: #3396FF } @@ -113,14 +106,12 @@ a { color: black } /* Infopanel */ 
-.infopanel-container { width: 100%; height: 100%; overflow: hidden; position: absolute; display: none; } -.infopanel-container.visible { display: block; } .infopanel { - position: absolute; z-index: 999; padding: 15px 15px; bottom: 25px; right: 50px; border: 1px solid #eff3fe; - font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); - background-color: white; border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; - transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); + position: absolute; z-index: 999; padding: 15px 15px; bottom: 55px; right: 50px; border: 1px solid #eff3fe; display: none; + font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); background-color: white; + border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); } +.infopanel.visible { display: block; } .infopanel.closed { box-shadow: none; transform: translateX(100%); right: 0px; cursor: pointer; } .infopanel .message { font-size: 13px; line-height: 15px; display: inline-block; vertical-align: -9px; } .infopanel .message .line { max-width: 200px; display: inline-block; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; } @@ -134,13 +125,12 @@ a { color: black } padding: 4px; border-top-left-radius: 6px; border-bottom-left-radius: 6px; font-size: 10px; opacity: 0; margin-left: 0px; pointer-events: none; transition: all 0.6s; } -.infopanel.unfolded .message .line { overflow: visible; white-space: normal; } .body-sidebar .infopanel { right: 425px; } .body-sidebar .infopanel.closed { right: 0px; } /* Loading screen */ -.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: auto; display: none } +.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: hidden; display: none } .theme-dark .loadingscreen { background-color: #180922; } .loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% } .loading-config { @@ -152,16 +142,16 @@ a { color: black } .loadingscreen.ready .loading-config { top: 0px; } -/* Loading console */ -.loadingscreen .console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; transform: translateY(-20px); } -.loadingscreen .console-line:last-child { color: #6C6767 } -.loadingscreen .console .cursor { +/* Console */ +.console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; transform: translateY(-20px); } +.console-line:last-child { color: #6C6767 } +.console .cursor { background-color: #999; color: #999; animation: pulse 1.5s infinite ease-in-out; margin-right: -9px; display: inline-block; width: 9px; height: 19px; vertical-align: -4px; } -.loadingscreen .console .console-error { color: #e74c3c; font-weight: bold; animation: pulse 2s infinite linear } -.loadingscreen .console .console-warning { color: #8e44ad; } -.loadingscreen .console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px } +.console .console-error { color: #e74c3c; font-weight: bold; animation: pulse 2s infinite linear } +.console .console-warning { color: #8e44ad; } +.console .button { margin: 20px; display: inline-block; 
text-transform: none; padding: 10px 20px } /* Flipper loading anim */ @@ -186,7 +176,7 @@ a { color: black } .progressbar { background: #26C281; position: fixed; width: 100%; z-index: 100; top: 0; left: 0; transform: scaleX(0); transform-origin: 0% 0%; transform:translate3d(0,0,0); - height: 2px; transition: transform 1s, opacity 1s; display: none; backface-visibility: hidden; transform-style: preserve-3d; + height: 2px; transition: transform 0.5s, opacity 1s; display: none; backface-visibility: hidden; transform-style: preserve-3d; } .progressbar .peg { display: block; position: absolute; right: 0; width: 100px; height: 100%; @@ -226,8 +216,8 @@ a { color: black } /* Small screen */ @media screen and (max-width: 600px) { .notification .message { white-space: normal; } - .notification .buttons { padding-right: 22px; padding-right: 40px; } + .notification .buttons { padding-right: 22px; } .notification .button { white-space: nowrap; } - .notification { margin: 0px; } - .notifications { right: 0px; max-width: 80%; } + .notification { margin: 0px } + .notifications { right: 0px } } diff --git a/src/Ui/media/ZeroSiteTheme.coffee b/src/Ui/media/ZeroSiteTheme.coffee index 79adb671..47144051 100644 --- a/src/Ui/media/ZeroSiteTheme.coffee +++ b/src/Ui/media/ZeroSiteTheme.coffee @@ -9,10 +9,10 @@ changeColorScheme = (theme) -> zeroframe.cmd "userGetGlobalSettings", [], (user_settings) -> if user_settings.theme != theme user_settings.theme = theme - zeroframe.cmd "userSetGlobalSettings", [user_settings], (status) -> - if status == "ok" - location.reload() - return + zeroframe.cmd "userSetGlobalSettings", [user_settings] + + location.reload() + return return @@ -21,12 +21,7 @@ displayNotification = ({matches, media}) -> if !matches return - zeroframe.cmd "siteInfo", [], (site_info) -> - if "ADMIN" in site_info.settings.permissions - zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.
    Please reload site to use it."] - else - zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.
    Please open ZeroHello to use it."] - return + zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.
    Please reload site to use it."] return diff --git a/src/Ui/media/all.css b/src/Ui/media/all.css index bd54cf34..eedcc074 100644 --- a/src/Ui/media/all.css +++ b/src/Ui/media/all.css @@ -1,5 +1,6 @@ -/* ---- Wrapper.css ---- */ + +/* ---- src/Ui/media/Wrapper.css ---- */ body { margin: 0; padding: 0; height: 100%; background-color: #D2CECD; overflow: hidden } @@ -12,10 +13,7 @@ a { color: black } #inner-iframe { width: 100%; height: 100%; position: absolute; border: 0; } /*; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out*/ #inner-iframe.back { -webkit-transform: scale(0.95) translate(-300px, 0); -moz-transform: scale(0.95) translate(-300px, 0); -o-transform: scale(0.95) translate(-300px, 0); -ms-transform: scale(0.95) translate(-300px, 0); transform: scale(0.95) translate(-300px, 0) ; opacity: 0.4 } -.button { - padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; - -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; text-decoration: none; -webkit-transition: all 0.5s; -moz-transition: all 0.5s; -o-transition: all 0.5s; -ms-transition: all 0.5s; transition: all 0.5s ; background-position: left center; white-space: nowrap; -} +.button { padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; text-decoration: none; -webkit-transition: all 0.5s; -moz-transition: all 0.5s; -o-transition: all 0.5s; -ms-transition: all 0.5s; transition: all 0.5s ; background-position: left center; } .button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none } .button:active { position: relative; top: 1px } .button:focus { outline: none } @@ -51,22 +49,21 @@ a { color: black } .notifications { position: absolute; top: 0; right: 80px; display: inline-block; z-index: 999; white-space: nowrap } .notification { - position: relative; float: right; clear: both; margin: 10px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; overflow: hidden; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; - -webkit-perspective: 1000px; -moz-perspective: 1000px; -o-perspective: 1000px; -ms-perspective: 1000px; perspective: 1000px ; padding-bottom: 5px; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; - font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/ + position: relative; float: right; clear: both; margin: 10px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; overflow: hidden; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; -webkit-perspective: 1000px; -moz-perspective: 1000px; -o-perspective: 1000px; -ms-perspective: 1000px; perspective: 1000px ; padding-bottom: 5px; + color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; line-height: 20px; /*border: 1px 
solid rgba(210, 206, 205, 0.2)*/ } .notification-icon { display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 2; text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white; } .notification .body { - padding-left: 14px; padding-right: 60px; height: 50px; vertical-align: middle; display: table; padding-right: 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; + padding-left: 14px; padding-right: 60px; height: 40px; vertical-align: middle; display: table; background-color: white; left: 50px; top: 0; position: relative; padding-top: 5px; padding-bottom: 5px; } .notification .message-outer { display: table-row } -.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; padding-right: 20px; text-align: right; } +.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; } .notification.long .body { padding-top: 10px; padding-bottom: 10px } -.notification .message { display: table-cell; vertical-align: middle; max-width: 500px; white-space: normal; } +.notification .message { display: table-cell; vertical-align: middle; } .notification.visible { max-width: 350px } @@ -123,14 +120,12 @@ a { color: black } /* Infopanel */ -.infopanel-container { width: 100%; height: 100%; overflow: hidden; position: absolute; display: none; } -.infopanel-container.visible { display: block; } .infopanel { - position: absolute; z-index: 999; padding: 15px 15px; bottom: 25px; right: 50px; border: 1px solid #eff3fe; - font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; -webkit-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -moz-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -o-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -ms-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17) ; - background-color: white; border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; - -webkit-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -moz-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -o-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -ms-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1) ; + position: absolute; z-index: 999; padding: 15px 15px; bottom: 55px; right: 50px; border: 1px solid #eff3fe; display: none; + font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; -webkit-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -moz-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -o-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -ms-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17) ; background-color: white; + border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; -webkit-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -moz-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -o-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -ms-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1) ; } +.infopanel.visible { display: block; } .infopanel.closed { -webkit-box-shadow: none; -moz-box-shadow: none; -o-box-shadow: none; -ms-box-shadow: none; box-shadow: none ; -webkit-transform: translateX(100%); 
-moz-transform: translateX(100%); -o-transform: translateX(100%); -ms-transform: translateX(100%); transform: translateX(100%) ; right: 0px; cursor: pointer; } .infopanel .message { font-size: 13px; line-height: 15px; display: inline-block; vertical-align: -9px; } .infopanel .message .line { max-width: 200px; display: inline-block; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; } @@ -144,13 +139,12 @@ a { color: black } padding: 4px; border-top-left-radius: 6px; border-bottom-left-radius: 6px; font-size: 10px; opacity: 0; margin-left: 0px; pointer-events: none; -webkit-transition: all 0.6s; -moz-transition: all 0.6s; -o-transition: all 0.6s; -ms-transition: all 0.6s; transition: all 0.6s ; } -.infopanel.unfolded .message .line { overflow: visible; white-space: normal; } .body-sidebar .infopanel { right: 425px; } .body-sidebar .infopanel.closed { right: 0px; } /* Loading screen */ -.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: auto; display: none } +.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: hidden; display: none } .theme-dark .loadingscreen { background-color: #180922; } .loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% } .loading-config { @@ -162,16 +156,16 @@ a { color: black } .loadingscreen.ready .loading-config { top: 0px; } -/* Loading console */ -.loadingscreen .console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; -webkit-transform: translateY(-20px); -moz-transform: translateY(-20px); -o-transform: translateY(-20px); -ms-transform: translateY(-20px); transform: translateY(-20px) ; } -.loadingscreen .console-line:last-child { color: #6C6767 } -.loadingscreen .console .cursor { +/* Console */ +.console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; -webkit-transform: translateY(-20px); -moz-transform: translateY(-20px); -o-transform: translateY(-20px); -ms-transform: translateY(-20px); transform: translateY(-20px) ; } +.console-line:last-child { color: #6C6767 } +.console .cursor { background-color: #999; color: #999; -webkit-animation: pulse 1.5s infinite ease-in-out; -moz-animation: pulse 1.5s infinite ease-in-out; -o-animation: pulse 1.5s infinite ease-in-out; -ms-animation: pulse 1.5s infinite ease-in-out; animation: pulse 1.5s infinite ease-in-out ; margin-right: -9px; display: inline-block; width: 9px; height: 19px; vertical-align: -4px; } -.loadingscreen .console .console-error { color: #e74c3c; font-weight: bold; -webkit-animation: pulse 2s infinite linear ; -moz-animation: pulse 2s infinite linear ; -o-animation: pulse 2s infinite linear ; -ms-animation: pulse 2s infinite linear ; animation: pulse 2s infinite linear } -.loadingscreen .console .console-warning { color: #8e44ad; } -.loadingscreen .console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px } +.console .console-error { color: #e74c3c; font-weight: bold; -webkit-animation: pulse 2s infinite linear ; -moz-animation: pulse 2s infinite linear ; -o-animation: pulse 2s infinite linear ; -ms-animation: pulse 2s infinite linear ; animation: pulse 2s infinite linear } +.console .console-warning { color: #8e44ad; } +.console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px } /* Flipper loading 
anim */ @@ -196,7 +190,7 @@ a { color: black } .progressbar { background: #26C281; position: fixed; width: 100%; z-index: 100; top: 0; left: 0; -webkit-transform: scaleX(0); -moz-transform: scaleX(0); -o-transform: scaleX(0); -ms-transform: scaleX(0); transform: scaleX(0) ; transform-origin: 0% 0%; transform:translate3d(0,0,0); - height: 2px; -webkit-transition: transform 1s, opacity 1s; -moz-transition: transform 1s, opacity 1s; -o-transition: transform 1s, opacity 1s; -ms-transition: transform 1s, opacity 1s; transition: transform 1s, opacity 1s ; display: none; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; transform-style: preserve-3d; + height: 2px; -webkit-transition: transform 0.5s, opacity 1s; -moz-transition: transform 0.5s, opacity 1s; -o-transition: transform 0.5s, opacity 1s; -ms-transition: transform 0.5s, opacity 1s; transition: transform 0.5s, opacity 1s ; display: none; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; transform-style: preserve-3d; } .progressbar .peg { display: block; position: absolute; right: 0; width: 100px; height: 100%; @@ -262,8 +256,8 @@ a { color: black } /* Small screen */ @media screen and (max-width: 600px) { .notification .message { white-space: normal; } - .notification .buttons { padding-right: 22px; padding-right: 40px; } + .notification .buttons { padding-right: 22px; } .notification .button { white-space: nowrap; } - .notification { margin: 0px; } - .notifications { right: 0px; max-width: 80%; } + .notification { margin: 0px } + .notifications { right: 0px } } diff --git a/src/Ui/media/all.js b/src/Ui/media/all.js index f5ad947c..24bb2a81 100644 --- a/src/Ui/media/all.js +++ b/src/Ui/media/all.js @@ -1,12 +1,12 @@ -/* ---- lib/00-jquery.min.js ---- */ +/* ---- src/Ui/media/lib/00-jquery.min.js ---- */ /*! 
jQuery v3.3.1 | (c) JS Foundation and other contributors | jquery.org/license */ !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(e,t){"use strict";var n=[],r=e.document,i=Object.getPrototypeOf,o=n.slice,a=n.concat,s=n.push,u=n.indexOf,l={},c=l.toString,f=l.hasOwnProperty,p=f.toString,d=p.call(Object),h={},g=function e(t){return"function"==typeof t&&"number"!=typeof t.nodeType},y=function e(t){return null!=t&&t===t.window},v={type:!0,src:!0,noModule:!0};function m(e,t,n){var i,o=(t=t||r).createElement("script");if(o.text=e,n)for(i in v)n[i]&&(o[i]=n[i]);t.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[c.call(e)]||"object":typeof e}var b="3.3.1",w=function(e,t){return new w.fn.init(e,t)},T=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;w.fn=w.prototype={jquery:"3.3.1",constructor:w,length:0,toArray:function(){return o.call(this)},get:function(e){return null==e?o.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=w.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return w.each(this,e)},map:function(e){return this.pushStack(w.map(this,function(t,n){return e.call(t,n,t)}))},slice:function(){return this.pushStack(o.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(n>=0&&n0&&t-1 in e)}var E=function(e){var t,n,r,i,o,a,s,u,l,c,f,p,d,h,g,y,v,m,x,b="sizzle"+1*new Date,w=e.document,T=0,C=0,E=ae(),k=ae(),S=ae(),D=function(e,t){return e===t&&(f=!0),0},N={}.hasOwnProperty,A=[],j=A.pop,q=A.push,L=A.push,H=A.slice,O=function(e,t){for(var n=0,r=e.length;n+~]|"+M+")"+M+"*"),z=new RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),X=new RegExp(W),U=new RegExp("^"+R+"$"),V={ID:new RegExp("^#("+R+")"),CLASS:new RegExp("^\\.("+R+")"),TAG:new RegExp("^("+R+"|[*])"),ATTR:new RegExp("^"+I),PSEUDO:new RegExp("^"+W),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+P+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},G=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Q=/^[^{]+\{\s*\[native \w/,J=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,K=/[+~]/,Z=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ee=function(e,t,n){var r="0x"+t-65536;return r!==r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},te=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ne=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},re=function(){p()},ie=me(function(e){return!0===e.disabled&&("form"in e||"label"in e)},{dir:"parentNode",next:"legend"});try{L.apply(A=H.call(w.childNodes),w.childNodes),A[w.childNodes.length].nodeType}catch(e){L={apply:A.length?function(e,t){q.apply(e,H.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function oe(e,t,r,i){var o,s,l,c,f,h,v,m=t&&t.ownerDocument,T=t?t.nodeType:9;if(r=r||[],"string"!=typeof e||!e||1!==T&&9!==T&&11!==T)return 
r;if(!i&&((t?t.ownerDocument||t:w)!==d&&p(t),t=t||d,g)){if(11!==T&&(f=J.exec(e)))if(o=f[1]){if(9===T){if(!(l=t.getElementById(o)))return r;if(l.id===o)return r.push(l),r}else if(m&&(l=m.getElementById(o))&&x(t,l)&&l.id===o)return r.push(l),r}else{if(f[2])return L.apply(r,t.getElementsByTagName(e)),r;if((o=f[3])&&n.getElementsByClassName&&t.getElementsByClassName)return L.apply(r,t.getElementsByClassName(o)),r}if(n.qsa&&!S[e+" "]&&(!y||!y.test(e))){if(1!==T)m=t,v=e;else if("object"!==t.nodeName.toLowerCase()){(c=t.getAttribute("id"))?c=c.replace(te,ne):t.setAttribute("id",c=b),s=(h=a(e)).length;while(s--)h[s]="#"+c+" "+ve(h[s]);v=h.join(","),m=K.test(e)&&ge(t.parentNode)||t}if(v)try{return L.apply(r,m.querySelectorAll(v)),r}catch(e){}finally{c===b&&t.removeAttribute("id")}}}return u(e.replace(B,"$1"),t,r,i)}function ae(){var e=[];function t(n,i){return e.push(n+" ")>r.cacheLength&&delete t[e.shift()],t[n+" "]=i}return t}function se(e){return e[b]=!0,e}function ue(e){var t=d.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function le(e,t){var n=e.split("|"),i=n.length;while(i--)r.attrHandle[n[i]]=t}function ce(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function fe(e){return function(t){return"input"===t.nodeName.toLowerCase()&&t.type===e}}function pe(e){return function(t){var n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function de(e){return function(t){return"form"in t?t.parentNode&&!1===t.disabled?"label"in t?"label"in t.parentNode?t.parentNode.disabled===e:t.disabled===e:t.isDisabled===e||t.isDisabled!==!e&&ie(t)===e:t.disabled===e:"label"in t&&t.disabled===e}}function he(e){return se(function(t){return t=+t,se(function(n,r){var i,o=e([],n.length,t),a=o.length;while(a--)n[i=o[a]]&&(n[i]=!(r[i]=n[i]))})})}function ge(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}n=oe.support={},o=oe.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return!!t&&"HTML"!==t.nodeName},p=oe.setDocument=function(e){var t,i,a=e?e.ownerDocument||e:w;return a!==d&&9===a.nodeType&&a.documentElement?(d=a,h=d.documentElement,g=!o(d),w!==d&&(i=d.defaultView)&&i.top!==i&&(i.addEventListener?i.addEventListener("unload",re,!1):i.attachEvent&&i.attachEvent("onunload",re)),n.attributes=ue(function(e){return e.className="i",!e.getAttribute("className")}),n.getElementsByTagName=ue(function(e){return e.appendChild(d.createComment("")),!e.getElementsByTagName("*").length}),n.getElementsByClassName=Q.test(d.getElementsByClassName),n.getById=ue(function(e){return h.appendChild(e).id=b,!d.getElementsByName||!d.getElementsByName(b).length}),n.getById?(r.filter.ID=function(e){var t=e.replace(Z,ee);return function(e){return e.getAttribute("id")===t}},r.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&g){var n=t.getElementById(e);return n?[n]:[]}}):(r.filter.ID=function(e){var t=e.replace(Z,ee);return function(e){var n="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return n&&n.value===t}},r.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&g){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),r.find.TAG=n.getElementsByTagName?function(e,t){return"undefined"!=typeof 
t.getElementsByTagName?t.getElementsByTagName(e):n.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},r.find.CLASS=n.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&g)return t.getElementsByClassName(e)},v=[],y=[],(n.qsa=Q.test(d.querySelectorAll))&&(ue(function(e){h.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&y.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||y.push("\\["+M+"*(?:value|"+P+")"),e.querySelectorAll("[id~="+b+"-]").length||y.push("~="),e.querySelectorAll(":checked").length||y.push(":checked"),e.querySelectorAll("a#"+b+"+*").length||y.push(".#.+[+~]")}),ue(function(e){e.innerHTML="";var t=d.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&y.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&y.push(":enabled",":disabled"),h.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&y.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),y.push(",.*:")})),(n.matchesSelector=Q.test(m=h.matches||h.webkitMatchesSelector||h.mozMatchesSelector||h.oMatchesSelector||h.msMatchesSelector))&&ue(function(e){n.disconnectedMatch=m.call(e,"*"),m.call(e,"[s!='']:x"),v.push("!=",W)}),y=y.length&&new RegExp(y.join("|")),v=v.length&&new RegExp(v.join("|")),t=Q.test(h.compareDocumentPosition),x=t||Q.test(h.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return f=!0,0;var r=!e.compareDocumentPosition-!t.compareDocumentPosition;return r||(1&(r=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!n.sortDetached&&t.compareDocumentPosition(e)===r?e===d||e.ownerDocument===w&&x(w,e)?-1:t===d||t.ownerDocument===w&&x(w,t)?1:c?O(c,e)-O(c,t):0:4&r?-1:1)}:function(e,t){if(e===t)return f=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===d?-1:t===d?1:i?-1:o?1:c?O(c,e)-O(c,t):0;if(i===o)return ce(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?ce(a[r],s[r]):a[r]===w?-1:s[r]===w?1:0},d):d},oe.matches=function(e,t){return oe(e,null,null,t)},oe.matchesSelector=function(e,t){if((e.ownerDocument||e)!==d&&p(e),t=t.replace(z,"='$1']"),n.matchesSelector&&g&&!S[t+" "]&&(!v||!v.test(t))&&(!y||!y.test(t)))try{var r=m.call(e,t);if(r||n.disconnectedMatch||e.document&&11!==e.document.nodeType)return r}catch(e){}return oe(t,d,null,[e]).length>0},oe.contains=function(e,t){return(e.ownerDocument||e)!==d&&p(e),x(e,t)},oe.attr=function(e,t){(e.ownerDocument||e)!==d&&p(e);var i=r.attrHandle[t.toLowerCase()],o=i&&N.call(r.attrHandle,t.toLowerCase())?i(e,t,!g):void 0;return void 0!==o?o:n.attributes||!g?e.getAttribute(t):(o=e.getAttributeNode(t))&&o.specified?o.value:null},oe.escape=function(e){return(e+"").replace(te,ne)},oe.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},oe.uniqueSort=function(e){var t,r=[],i=0,o=0;if(f=!n.detectDuplicates,c=!n.sortStable&&e.slice(0),e.sort(D),f){while(t=e[o++])t===e[o]&&(i=r.push(o));while(i--)e.splice(r[i],1)}return c=null,e},i=oe.getText=function(e){var 
t,n="",r=0,o=e.nodeType;if(o){if(1===o||9===o||11===o){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=i(e)}else if(3===o||4===o)return e.nodeValue}else while(t=e[r++])n+=i(t);return n},(r=oe.selectors={cacheLength:50,createPseudo:se,match:V,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(Z,ee),e[3]=(e[3]||e[4]||e[5]||"").replace(Z,ee),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||oe.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&oe.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return V.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=a(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(Z,ee).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=E[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&E(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var i=oe.attr(r,e);return null==i?"!="===t:!t||(i+="","="===t?i===n:"!="===t?i!==n:"^="===t?n&&0===i.indexOf(n):"*="===t?n&&i.indexOf(n)>-1:"$="===t?n&&i.slice(-n.length)===n:"~="===t?(" "+i.replace($," ")+" ").indexOf(n)>-1:"|="===t&&(i===n||i.slice(0,n.length+1)===n+"-"))}},CHILD:function(e,t,n,r,i){var o="nth"!==e.slice(0,3),a="last"!==e.slice(-4),s="of-type"===t;return 1===r&&0===i?function(e){return!!e.parentNode}:function(t,n,u){var l,c,f,p,d,h,g=o!==a?"nextSibling":"previousSibling",y=t.parentNode,v=s&&t.nodeName.toLowerCase(),m=!u&&!s,x=!1;if(y){if(o){while(g){p=t;while(p=p[g])if(s?p.nodeName.toLowerCase()===v:1===p.nodeType)return!1;h=g="only"===e&&!h&&"nextSibling"}return!0}if(h=[a?y.firstChild:y.lastChild],a&&m){x=(d=(l=(c=(f=(p=y)[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]||[])[0]===T&&l[1])&&l[2],p=d&&y.childNodes[d];while(p=++d&&p&&p[g]||(x=d=0)||h.pop())if(1===p.nodeType&&++x&&p===t){c[e]=[T,d,x];break}}else if(m&&(x=d=(l=(c=(f=(p=t)[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]||[])[0]===T&&l[1]),!1===x)while(p=++d&&p&&p[g]||(x=d=0)||h.pop())if((s?p.nodeName.toLowerCase()===v:1===p.nodeType)&&++x&&(m&&((c=(f=p[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]=[T,x]),p===t))break;return(x-=i)===r||x%r==0&&x/r>=0}}},PSEUDO:function(e,t){var n,i=r.pseudos[e]||r.setFilters[e.toLowerCase()]||oe.error("unsupported pseudo: "+e);return i[b]?i(t):i.length>1?(n=[e,e,"",t],r.setFilters.hasOwnProperty(e.toLowerCase())?se(function(e,n){var r,o=i(e,t),a=o.length;while(a--)e[r=O(e,o[a])]=!(n[r]=o[a])}):function(e){return i(e,0,n)}):i}},pseudos:{not:se(function(e){var t=[],n=[],r=s(e.replace(B,"$1"));return r[b]?se(function(e,t,n,i){var o,a=r(e,null,i,[]),s=e.length;while(s--)(o=a[s])&&(e[s]=!(t[s]=o))}):function(e,i,o){return t[0]=e,r(t,null,o,n),t[0]=null,!n.pop()}}),has:se(function(e){return function(t){return oe(e,t).length>0}}),contains:se(function(e){return e=e.replace(Z,ee),function(t){return(t.textContent||t.innerText||i(t)).indexOf(e)>-1}}),lang:se(function(e){return U.test(e||"")||oe.error("unsupported lang: 
"+e),e=e.replace(Z,ee).toLowerCase(),function(t){var n;do{if(n=g?t.lang:t.getAttribute("xml:lang")||t.getAttribute("lang"))return(n=n.toLowerCase())===e||0===n.indexOf(e+"-")}while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(t){var n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===h},focus:function(e){return e===d.activeElement&&(!d.hasFocus||d.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:de(!1),disabled:de(!0),checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!r.pseudos.empty(e)},header:function(e){return Y.test(e.nodeName)},input:function(e){return G.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:he(function(){return[0]}),last:he(function(e,t){return[t-1]}),eq:he(function(e,t,n){return[n<0?n+t:n]}),even:he(function(e,t){for(var n=0;n=0;)e.push(r);return e}),gt:he(function(e,t,n){for(var r=n<0?n+t:n;++r1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function be(e,t,n){for(var r=0,i=t.length;r-1&&(o[l]=!(a[l]=f))}}else v=we(v===a?v.splice(h,v.length):v),i?i(null,a,v,u):L.apply(a,v)})}function Ce(e){for(var t,n,i,o=e.length,a=r.relative[e[0].type],s=a||r.relative[" "],u=a?1:0,c=me(function(e){return e===t},s,!0),f=me(function(e){return O(t,e)>-1},s,!0),p=[function(e,n,r){var i=!a&&(r||n!==l)||((t=n).nodeType?c(e,n,r):f(e,n,r));return t=null,i}];u1&&xe(p),u>1&&ve(e.slice(0,u-1).concat({value:" "===e[u-2].type?"*":""})).replace(B,"$1"),n,u0,i=e.length>0,o=function(o,a,s,u,c){var f,h,y,v=0,m="0",x=o&&[],b=[],w=l,C=o||i&&r.find.TAG("*",c),E=T+=null==w?1:Math.random()||.1,k=C.length;for(c&&(l=a===d||a||c);m!==k&&null!=(f=C[m]);m++){if(i&&f){h=0,a||f.ownerDocument===d||(p(f),s=!g);while(y=e[h++])if(y(f,a||d,s)){u.push(f);break}c&&(T=E)}n&&((f=!y&&f)&&v--,o&&x.push(f))}if(v+=m,n&&m!==v){h=0;while(y=t[h++])y(x,b,a,s);if(o){if(v>0)while(m--)x[m]||b[m]||(b[m]=j.call(u));b=we(b)}L.apply(u,b),c&&!o&&b.length>0&&v+t.length>1&&oe.uniqueSort(u)}return c&&(T=E,l=w),x};return n?se(o):o}return s=oe.compile=function(e,t){var n,r=[],i=[],o=S[e+" "];if(!o){t||(t=a(e)),n=t.length;while(n--)(o=Ce(t[n]))[b]?r.push(o):i.push(o);(o=S(e,Ee(i,r))).selector=e}return o},u=oe.select=function(e,t,n,i){var o,u,l,c,f,p="function"==typeof e&&e,d=!i&&a(e=p.selector||e);if(n=n||[],1===d.length){if((u=d[0]=d[0].slice(0)).length>2&&"ID"===(l=u[0]).type&&9===t.nodeType&&g&&r.relative[u[1].type]){if(!(t=(r.find.ID(l.matches[0].replace(Z,ee),t)||[])[0]))return n;p&&(t=t.parentNode),e=e.slice(u.shift().value.length)}o=V.needsContext.test(e)?0:u.length;while(o--){if(l=u[o],r.relative[c=l.type])break;if((f=r.find[c])&&(i=f(l.matches[0].replace(Z,ee),K.test(u[0].type)&&ge(t.parentNode)||t))){if(u.splice(o,1),!(e=i.length&&ve(u)))return L.apply(n,i),n;break}}}return(p||s(e,d))(i,t,!g,n,!t||K.test(e)&&ge(t.parentNode)||t),n},n.sortStable=b.split("").sort(D).join("")===b,n.detectDuplicates=!!f,p(),n.sortDetached=ue(function(e){return 1&e.compareDocumentPosition(d.createElement("fieldset"))}),ue(function(e){return 
e.innerHTML="","#"===e.firstChild.getAttribute("href")})||le("type|href|height|width",function(e,t,n){if(!n)return e.getAttribute(t,"type"===t.toLowerCase()?1:2)}),n.attributes&&ue(function(e){return e.innerHTML="",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||le("value",function(e,t,n){if(!n&&"input"===e.nodeName.toLowerCase())return e.defaultValue}),ue(function(e){return null==e.getAttribute("disabled")})||le(P,function(e,t,n){var r;if(!n)return!0===e[t]?t.toLowerCase():(r=e.getAttributeNode(t))&&r.specified?r.value:null}),oe}(e);w.find=E,w.expr=E.selectors,w.expr[":"]=w.expr.pseudos,w.uniqueSort=w.unique=E.uniqueSort,w.text=E.getText,w.isXMLDoc=E.isXML,w.contains=E.contains,w.escapeSelector=E.escape;var k=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&w(e).is(n))break;r.push(e)}return r},S=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},D=w.expr.match.needsContext;function N(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}var A=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,t,n){return g(t)?w.grep(e,function(e,r){return!!t.call(e,r,e)!==n}):t.nodeType?w.grep(e,function(e){return e===t!==n}):"string"!=typeof t?w.grep(e,function(e){return u.call(t,e)>-1!==n}):w.filter(t,e,n)}w.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?w.find.matchesSelector(r,e)?[r]:[]:w.find.matches(e,w.grep(t,function(e){return 1===e.nodeType}))},w.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(w(e).filter(function(){for(t=0;t1?w.uniqueSort(n):n},filter:function(e){return this.pushStack(j(this,e||[],!1))},not:function(e){return this.pushStack(j(this,e||[],!0))},is:function(e){return!!j(this,"string"==typeof e&&D.test(e)?w(e):e||[],!1).length}});var q,L=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(w.fn.init=function(e,t,n){var i,o;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(i="<"===e[0]&&">"===e[e.length-1]&&e.length>=3?[null,e,null]:L.exec(e))||!i[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(i[1]){if(t=t instanceof w?t[0]:t,w.merge(this,w.parseHTML(i[1],t&&t.nodeType?t.ownerDocument||t:r,!0)),A.test(i[1])&&w.isPlainObject(t))for(i in t)g(this[i])?this[i](t[i]):this.attr(i,t[i]);return this}return(o=r.getElementById(i[2]))&&(this[0]=o,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):g(e)?void 0!==n.ready?n.ready(e):e(w):w.makeArray(e,this)}).prototype=w.fn,q=w(r);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};w.fn.extend({has:function(e){var t=w(e,this),n=t.length;return this.filter(function(){for(var e=0;e-1:1===n.nodeType&&w.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(o.length>1?w.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?u.call(w(e),this[0]):u.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(w.uniqueSort(w.merge(this.get(),w(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}});function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}w.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return k(e,"parentNode")},parentsUntil:function(e,t,n){return k(e,"parentNode",n)},next:function(e){return P(e,"nextSibling")},prev:function(e){return 
P(e,"previousSibling")},nextAll:function(e){return k(e,"nextSibling")},prevAll:function(e){return k(e,"previousSibling")},nextUntil:function(e,t,n){return k(e,"nextSibling",n)},prevUntil:function(e,t,n){return k(e,"previousSibling",n)},siblings:function(e){return S((e.parentNode||{}).firstChild,e)},children:function(e){return S(e.firstChild)},contents:function(e){return N(e,"iframe")?e.contentDocument:(N(e,"template")&&(e=e.content||e),w.merge([],e.childNodes))}},function(e,t){w.fn[e]=function(n,r){var i=w.map(this,t,n);return"Until"!==e.slice(-5)&&(r=n),r&&"string"==typeof r&&(i=w.filter(r,i)),this.length>1&&(O[e]||w.uniqueSort(i),H.test(e)&&i.reverse()),this.pushStack(i)}});var M=/[^\x20\t\r\n\f]+/g;function R(e){var t={};return w.each(e.match(M)||[],function(e,n){t[n]=!0}),t}w.Callbacks=function(e){e="string"==typeof e?R(e):w.extend({},e);var t,n,r,i,o=[],a=[],s=-1,u=function(){for(i=i||e.once,r=t=!0;a.length;s=-1){n=a.shift();while(++s-1)o.splice(n,1),n<=s&&s--}),this},has:function(e){return e?w.inArray(e,o)>-1:o.length>0},empty:function(){return o&&(o=[]),this},disable:function(){return i=a=[],o=n="",this},disabled:function(){return!o},lock:function(){return i=a=[],n||t||(o=n=""),this},locked:function(){return!!i},fireWith:function(e,n){return i||(n=[e,(n=n||[]).slice?n.slice():n],a.push(n),t||u()),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!r}};return l};function I(e){return e}function W(e){throw e}function $(e,t,n,r){var i;try{e&&g(i=e.promise)?i.call(e).done(t).fail(n):e&&g(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}w.extend({Deferred:function(t){var n=[["notify","progress",w.Callbacks("memory"),w.Callbacks("memory"),2],["resolve","done",w.Callbacks("once memory"),w.Callbacks("once memory"),0,"resolved"],["reject","fail",w.Callbacks("once memory"),w.Callbacks("once memory"),1,"rejected"]],r="pending",i={state:function(){return r},always:function(){return o.done(arguments).fail(arguments),this},"catch":function(e){return i.then(null,e)},pipe:function(){var e=arguments;return w.Deferred(function(t){w.each(n,function(n,r){var i=g(e[r[4]])&&e[r[4]];o[r[1]](function(){var e=i&&i.apply(this,arguments);e&&g(e.promise)?e.promise().progress(t.notify).done(t.resolve).fail(t.reject):t[r[0]+"With"](this,i?[e]:arguments)})}),e=null}).promise()},then:function(t,r,i){var o=0;function a(t,n,r,i){return function(){var s=this,u=arguments,l=function(){var e,l;if(!(t=o&&(r!==W&&(s=void 0,u=[e]),n.rejectWith(s,u))}};t?c():(w.Deferred.getStackHook&&(c.stackTrace=w.Deferred.getStackHook()),e.setTimeout(c))}}return w.Deferred(function(e){n[0][3].add(a(0,e,g(i)?i:I,e.notifyWith)),n[1][3].add(a(0,e,g(t)?t:I)),n[2][3].add(a(0,e,g(r)?r:W))}).promise()},promise:function(e){return null!=e?w.extend(e,i):i}},o={};return w.each(n,function(e,t){var a=t[2],s=t[5];i[t[1]]=a.add,s&&a.add(function(){r=s},n[3-e][2].disable,n[3-e][3].disable,n[0][2].lock,n[0][3].lock),a.add(t[3].fire),o[t[0]]=function(){return o[t[0]+"With"](this===o?void 0:this,arguments),this},o[t[0]+"With"]=a.fireWith}),i.promise(o),t&&t.call(o,o),o},when:function(e){var t=arguments.length,n=t,r=Array(n),i=o.call(arguments),a=w.Deferred(),s=function(e){return function(n){r[e]=this,i[e]=arguments.length>1?o.call(arguments):n,--t||a.resolveWith(r,i)}};if(t<=1&&($(e,a.done(s(n)).resolve,a.reject,!t),"pending"===a.state()||g(i[n]&&i[n].then)))return a.then();while(n--)$(i[n],s(n),a.reject);return a.promise()}});var 
B=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;w.Deferred.exceptionHook=function(t,n){e.console&&e.console.warn&&t&&B.test(t.name)&&e.console.warn("jQuery.Deferred exception: "+t.message,t.stack,n)},w.readyException=function(t){e.setTimeout(function(){throw t})};var F=w.Deferred();w.fn.ready=function(e){return F.then(e)["catch"](function(e){w.readyException(e)}),this},w.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--w.readyWait:w.isReady)||(w.isReady=!0,!0!==e&&--w.readyWait>0||F.resolveWith(r,[w]))}}),w.ready.then=F.then;function _(){r.removeEventListener("DOMContentLoaded",_),e.removeEventListener("load",_),w.ready()}"complete"===r.readyState||"loading"!==r.readyState&&!r.documentElement.doScroll?e.setTimeout(w.ready):(r.addEventListener("DOMContentLoaded",_),e.addEventListener("load",_));var z=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===x(n)){i=!0;for(s in n)z(e,t,s,n[s],!0,o,a)}else if(void 0!==r&&(i=!0,g(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(w(e),n)})),t))for(;s1,null,!0)},removeData:function(e){return this.each(function(){K.remove(this,e)})}}),w.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=J.get(e,t),n&&(!r||Array.isArray(n)?r=J.access(e,t,w.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=w.queue(e,t),r=n.length,i=n.shift(),o=w._queueHooks(e,t),a=function(){w.dequeue(e,t)};"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,a,o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return J.get(e,n)||J.access(e,n,{empty:w.Callbacks("once memory").add(function(){J.remove(e,[t+"queue",n])})})}}),w.fn.extend({queue:function(e,t){var n=2;return"string"!=typeof e&&(t=e,e="fx",n--),arguments.length\x20\t\r\n\f]+)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,""],thead:[1,"","
    "],col:[2,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],_default:[0,"",""]};ge.optgroup=ge.option,ge.tbody=ge.tfoot=ge.colgroup=ge.caption=ge.thead,ge.th=ge.td;function ye(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&N(e,t)?w.merge([e],n):n}function ve(e,t){for(var n=0,r=e.length;n-1)i&&i.push(o);else if(l=w.contains(o.ownerDocument,o),a=ye(f.appendChild(o),"script"),l&&ve(a),n){c=0;while(o=a[c++])he.test(o.type||"")&&n.push(o)}return f}!function(){var e=r.createDocumentFragment().appendChild(r.createElement("div")),t=r.createElement("input");t.setAttribute("type","radio"),t.setAttribute("checked","checked"),t.setAttribute("name","t"),e.appendChild(t),h.checkClone=e.cloneNode(!0).cloneNode(!0).lastChild.checked,e.innerHTML="",h.noCloneChecked=!!e.cloneNode(!0).lastChild.defaultValue}();var be=r.documentElement,we=/^key/,Te=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ce=/^([^.]*)(?:\.(.+)|)/;function Ee(){return!0}function ke(){return!1}function Se(){try{return r.activeElement}catch(e){}}function De(e,t,n,r,i,o){var a,s;if("object"==typeof t){"string"!=typeof n&&(r=r||n,n=void 0);for(s in t)De(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=ke;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return w().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=w.guid++)),e.each(function(){w.event.add(this,t,i,r,n)})}w.event={global:{},add:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,y=J.get(e);if(y){n.handler&&(n=(o=n).handler,i=o.selector),i&&w.find.matchesSelector(be,i),n.guid||(n.guid=w.guid++),(u=y.events)||(u=y.events={}),(a=y.handle)||(a=y.handle=function(t){return"undefined"!=typeof w&&w.event.triggered!==t.type?w.event.dispatch.apply(e,arguments):void 0}),l=(t=(t||"").match(M)||[""]).length;while(l--)d=g=(s=Ce.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=w.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=w.event.special[d]||{},c=w.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&w.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(e,r,h,a)||e.addEventListener&&e.addEventListener(d,a)),f.add&&(f.add.call(e,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),w.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,y=J.hasData(e)&&J.get(e);if(y&&(u=y.events)){l=(t=(t||"").match(M)||[""]).length;while(l--)if(s=Ce.exec(t[l])||[],d=g=s[1],h=(s[2]||"").split(".").sort(),d){f=w.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,y.handle)||w.removeEvent(e,d,y.handle),delete u[d])}else for(d in u)w.event.remove(e,d+t[l],n,r,!0);w.isEmptyObject(u)&&J.remove(e,"handle events")}},dispatch:function(e){var t=w.event.fix(e),n,r,i,o,a,s,u=new 
Array(arguments.length),l=(J.get(this,"events")||{})[t.type]||[],c=w.event.special[t.type]||{};for(u[0]=t,n=1;n=1))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n-1:w.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u\x20\t\r\n\f]*)[^>]*)\/>/gi,Ae=/\s*$/g;function Le(e,t){return N(e,"table")&&N(11!==t.nodeType?t:t.firstChild,"tr")?w(e).children("tbody")[0]||e:e}function He(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Oe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Pe(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(J.hasData(e)&&(o=J.access(e),a=J.set(t,o),l=o.events)){delete a.handle,a.events={};for(i in l)for(n=0,r=l[i].length;n1&&"string"==typeof y&&!h.checkClone&&je.test(y))return e.each(function(i){var o=e.eq(i);v&&(t[0]=y.call(this,i,o.html())),Re(o,t,n,r)});if(p&&(i=xe(t,e[0].ownerDocument,!1,e,r),o=i.firstChild,1===i.childNodes.length&&(i=o),o||r)){for(u=(s=w.map(ye(i,"script"),He)).length;f")},clone:function(e,t,n){var r,i,o,a,s=e.cloneNode(!0),u=w.contains(e.ownerDocument,e);if(!(h.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||w.isXMLDoc(e)))for(a=ye(s),r=0,i=(o=ye(e)).length;r0&&ve(a,!u&&ye(e,"script")),s},cleanData:function(e){for(var t,n,r,i=w.event.special,o=0;void 0!==(n=e[o]);o++)if(Y(n)){if(t=n[J.expando]){if(t.events)for(r in t.events)i[r]?w.event.remove(n,r):w.removeEvent(n,r,t.handle);n[J.expando]=void 0}n[K.expando]&&(n[K.expando]=void 0)}}}),w.fn.extend({detach:function(e){return Ie(this,e,!0)},remove:function(e){return Ie(this,e)},text:function(e){return z(this,function(e){return void 0===e?w.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return Re(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Le(this,e).appendChild(e)})},prepend:function(){return Re(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Le(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return Re(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return Re(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(w.cleanData(ye(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null!=e&&e,t=null==t?e:t,this.map(function(){return w.clone(this,e,t)})},html:function(e){return z(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!Ae.test(e)&&!ge[(de.exec(e)||["",""])[1].toLowerCase()]){e=w.htmlPrefilter(e);try{for(;n=0&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))),u}function et(e,t,n){var r=$e(e),i=Fe(e,t,r),o="border-box"===w.css(e,"boxSizing",!1,r),a=o;if(We.test(i)){if(!n)return i;i="auto"}return a=a&&(h.boxSizingReliable()||i===e.style[t]),("auto"===i||!parseFloat(i)&&"inline"===w.css(e,"display",!1,r))&&(i=e["offset"+t[0].toUpperCase()+t.slice(1)],a=!0),(i=parseFloat(i)||0)+Ze(e,t,n||(o?"border":"content"),a,r,i)+"px"}w.extend({cssHooks:{opacity:{get:function(e,t){if(t){var 
n=Fe(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=G(t),u=Xe.test(t),l=e.style;if(u||(t=Je(s)),a=w.cssHooks[t]||w.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"==(o=typeof n)&&(i=ie.exec(n))&&i[1]&&(n=ue(e,t,i),o="number"),null!=n&&n===n&&("number"===o&&(n+=i&&i[3]||(w.cssNumber[s]?"":"px")),h.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=G(t);return Xe.test(t)||(t=Je(s)),(a=w.cssHooks[t]||w.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=Fe(e,t,r)),"normal"===i&&t in Ve&&(i=Ve[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),w.each(["height","width"],function(e,t){w.cssHooks[t]={get:function(e,n,r){if(n)return!ze.test(w.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?et(e,t,r):se(e,Ue,function(){return et(e,t,r)})},set:function(e,n,r){var i,o=$e(e),a="border-box"===w.css(e,"boxSizing",!1,o),s=r&&Ze(e,t,r,a,o);return a&&h.scrollboxSize()===o.position&&(s-=Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-parseFloat(o[t])-Ze(e,t,"border",!1,o)-.5)),s&&(i=ie.exec(n))&&"px"!==(i[3]||"px")&&(e.style[t]=n,n=w.css(e,t)),Ke(e,n,s)}}}),w.cssHooks.marginLeft=_e(h.reliableMarginLeft,function(e,t){if(t)return(parseFloat(Fe(e,"marginLeft"))||e.getBoundingClientRect().left-se(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),w.each({margin:"",padding:"",border:"Width"},function(e,t){w.cssHooks[e+t]={expand:function(n){for(var r=0,i={},o="string"==typeof n?n.split(" "):[n];r<4;r++)i[e+oe[r]+t]=o[r]||o[r-2]||o[0];return i}},"margin"!==e&&(w.cssHooks[e+t].set=Ke)}),w.fn.extend({css:function(e,t){return z(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=$e(e),i=t.length;a1)}});function tt(e,t,n,r,i){return new tt.prototype.init(e,t,n,r,i)}w.Tween=tt,tt.prototype={constructor:tt,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||w.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(w.cssNumber[n]?"":"px")},cur:function(){var e=tt.propHooks[this.prop];return e&&e.get?e.get(this):tt.propHooks._default.get(this)},run:function(e){var t,n=tt.propHooks[this.prop];return this.options.duration?this.pos=t=w.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):tt.propHooks._default.set(this),this}},tt.prototype.init.prototype=tt.prototype,tt.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=w.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){w.fx.step[e.prop]?w.fx.step[e.prop](e):1!==e.elem.nodeType||null==e.elem.style[w.cssProps[e.prop]]&&!w.cssHooks[e.prop]?e.elem[e.prop]=e.now:w.style(e.elem,e.prop,e.now+e.unit)}}},tt.propHooks.scrollTop=tt.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},w.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},w.fx=tt.prototype.init,w.fx.step={};var 
nt,rt,it=/^(?:toggle|show|hide)$/,ot=/queueHooks$/;function at(){rt&&(!1===r.hidden&&e.requestAnimationFrame?e.requestAnimationFrame(at):e.setTimeout(at,w.fx.interval),w.fx.tick())}function st(){return e.setTimeout(function(){nt=void 0}),nt=Date.now()}function ut(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=oe[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function lt(e,t,n){for(var r,i=(pt.tweeners[t]||[]).concat(pt.tweeners["*"]),o=0,a=i.length;o1)},removeAttr:function(e){return this.each(function(){w.removeAttr(this,e)})}}),w.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?w.prop(e,t,n):(1===o&&w.isXMLDoc(e)||(i=w.attrHooks[t.toLowerCase()]||(w.expr.match.bool.test(t)?dt:void 0)),void 0!==n?null===n?void w.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=w.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!h.radioValue&&"radio"===t&&N(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(M);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),dt={set:function(e,t,n){return!1===t?w.removeAttr(e,n):e.setAttribute(n,n),n}},w.each(w.expr.match.bool.source.match(/\w+/g),function(e,t){var n=ht[t]||w.find.attr;ht[t]=function(e,t,r){var i,o,a=t.toLowerCase();return r||(o=ht[a],ht[a]=i,i=null!=n(e,t,r)?a:null,ht[a]=o),i}});var gt=/^(?:input|select|textarea|button)$/i,yt=/^(?:a|area)$/i;w.fn.extend({prop:function(e,t){return z(this,w.prop,e,t,arguments.length>1)},removeProp:function(e){return this.each(function(){delete this[w.propFix[e]||e]})}}),w.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&w.isXMLDoc(e)||(t=w.propFix[t]||t,i=w.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=w.find.attr(e,"tabindex");return t?parseInt(t,10):gt.test(e.nodeName)||yt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),h.optSelected||(w.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),w.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){w.propFix[this.toLowerCase()]=this});function vt(e){return(e.match(M)||[]).join(" ")}function mt(e){return e.getAttribute&&e.getAttribute("class")||""}function xt(e){return Array.isArray(e)?e:"string"==typeof e?e.match(M)||[]:[]}w.fn.extend({addClass:function(e){var t,n,r,i,o,a,s,u=0;if(g(e))return this.each(function(t){w(this).addClass(e.call(this,t,mt(this)))});if((t=xt(e)).length)while(n=this[u++])if(i=mt(n),r=1===n.nodeType&&" "+vt(i)+" "){a=0;while(o=t[a++])r.indexOf(" "+o+" ")<0&&(r+=o+" ");i!==(s=vt(r))&&n.setAttribute("class",s)}return this},removeClass:function(e){var t,n,r,i,o,a,s,u=0;if(g(e))return this.each(function(t){w(this).removeClass(e.call(this,t,mt(this)))});if(!arguments.length)return this.attr("class","");if((t=xt(e)).length)while(n=this[u++])if(i=mt(n),r=1===n.nodeType&&" "+vt(i)+" "){a=0;while(o=t[a++])while(r.indexOf(" "+o+" ")>-1)r=r.replace(" "+o+" "," ");i!==(s=vt(r))&&n.setAttribute("class",s)}return this},toggleClass:function(e,t){var n=typeof 
e,r="string"===n||Array.isArray(e);return"boolean"==typeof t&&r?t?this.addClass(e):this.removeClass(e):g(e)?this.each(function(n){w(this).toggleClass(e.call(this,n,mt(this),t),t)}):this.each(function(){var t,i,o,a;if(r){i=0,o=w(this),a=xt(e);while(t=a[i++])o.hasClass(t)?o.removeClass(t):o.addClass(t)}else void 0!==e&&"boolean"!==n||((t=mt(this))&&J.set(this,"__className__",t),this.setAttribute&&this.setAttribute("class",t||!1===e?"":J.get(this,"__className__")||""))})},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&(" "+vt(mt(n))+" ").indexOf(t)>-1)return!0;return!1}});var bt=/\r/g;w.fn.extend({val:function(e){var t,n,r,i=this[0];{if(arguments.length)return r=g(e),this.each(function(n){var i;1===this.nodeType&&(null==(i=r?e.call(this,n,w(this).val()):e)?i="":"number"==typeof i?i+="":Array.isArray(i)&&(i=w.map(i,function(e){return null==e?"":e+""})),(t=w.valHooks[this.type]||w.valHooks[this.nodeName.toLowerCase()])&&"set"in t&&void 0!==t.set(this,i,"value")||(this.value=i))});if(i)return(t=w.valHooks[i.type]||w.valHooks[i.nodeName.toLowerCase()])&&"get"in t&&void 0!==(n=t.get(i,"value"))?n:"string"==typeof(n=i.value)?n.replace(bt,""):null==n?"":n}}}),w.extend({valHooks:{option:{get:function(e){var t=w.find.attr(e,"value");return null!=t?t:vt(w.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r-1)&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),w.each(["radio","checkbox"],function(){w.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=w.inArray(w(e).val(),t)>-1}},h.checkOn||(w.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})}),h.focusin="onfocusin"in e;var wt=/^(?:focusinfocus|focusoutblur)$/,Tt=function(e){e.stopPropagation()};w.extend(w.event,{trigger:function(t,n,i,o){var a,s,u,l,c,p,d,h,v=[i||r],m=f.call(t,"type")?t.type:t,x=f.call(t,"namespace")?t.namespace.split("."):[];if(s=h=u=i=i||r,3!==i.nodeType&&8!==i.nodeType&&!wt.test(m+w.event.triggered)&&(m.indexOf(".")>-1&&(m=(x=m.split(".")).shift(),x.sort()),c=m.indexOf(":")<0&&"on"+m,t=t[w.expando]?t:new w.Event(m,"object"==typeof t&&t),t.isTrigger=o?2:3,t.namespace=x.join("."),t.rnamespace=t.namespace?new RegExp("(^|\\.)"+x.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,t.result=void 0,t.target||(t.target=i),n=null==n?[t]:w.makeArray(n,[t]),d=w.event.special[m]||{},o||!d.trigger||!1!==d.trigger.apply(i,n))){if(!o&&!d.noBubble&&!y(i)){for(l=d.delegateType||m,wt.test(l+m)||(s=s.parentNode);s;s=s.parentNode)v.push(s),u=s;u===(i.ownerDocument||r)&&v.push(u.defaultView||u.parentWindow||e)}a=0;while((s=v[a++])&&!t.isPropagationStopped())h=s,t.type=a>1?l:d.bindType||m,(p=(J.get(s,"events")||{})[t.type]&&J.get(s,"handle"))&&p.apply(s,n),(p=c&&s[c])&&p.apply&&Y(s)&&(t.result=p.apply(s,n),!1===t.result&&t.preventDefault());return t.type=m,o||t.isDefaultPrevented()||d._default&&!1!==d._default.apply(v.pop(),n)||!Y(i)||c&&g(i[m])&&!y(i)&&((u=i[c])&&(i[c]=null),w.event.triggered=m,t.isPropagationStopped()&&h.addEventListener(m,Tt),i[m](),t.isPropagationStopped()&&h.removeEventListener(m,Tt),w.event.triggered=void 0,u&&(i[c]=u)),t.result}},simulate:function(e,t,n){var r=w.extend(new w.Event,n,{type:e,isSimulated:!0});w.event.trigger(r,null,t)}}),w.fn.extend({trigger:function(e,t){return this.each(function(){w.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return 
w.event.trigger(e,t,n,!0)}}),h.focusin||w.each({focus:"focusin",blur:"focusout"},function(e,t){var n=function(e){w.event.simulate(t,e.target,w.event.fix(e))};w.event.special[t]={setup:function(){var r=this.ownerDocument||this,i=J.access(r,t);i||r.addEventListener(e,n,!0),J.access(r,t,(i||0)+1)},teardown:function(){var r=this.ownerDocument||this,i=J.access(r,t)-1;i?J.access(r,t,i):(r.removeEventListener(e,n,!0),J.remove(r,t))}}});var Ct=e.location,Et=Date.now(),kt=/\?/;w.parseXML=function(t){var n;if(!t||"string"!=typeof t)return null;try{n=(new e.DOMParser).parseFromString(t,"text/xml")}catch(e){n=void 0}return n&&!n.getElementsByTagName("parsererror").length||w.error("Invalid XML: "+t),n};var St=/\[\]$/,Dt=/\r?\n/g,Nt=/^(?:submit|button|image|reset|file)$/i,At=/^(?:input|select|textarea|keygen)/i;function jt(e,t,n,r){var i;if(Array.isArray(t))w.each(t,function(t,i){n||St.test(e)?r(e,i):jt(e+"["+("object"==typeof i&&null!=i?t:"")+"]",i,n,r)});else if(n||"object"!==x(t))r(e,t);else for(i in t)jt(e+"["+i+"]",t[i],n,r)}w.param=function(e,t){var n,r=[],i=function(e,t){var n=g(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(Array.isArray(e)||e.jquery&&!w.isPlainObject(e))w.each(e,function(){i(this.name,this.value)});else for(n in e)jt(n,e[n],t,i);return r.join("&")},w.fn.extend({serialize:function(){return w.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=w.prop(this,"elements");return e?w.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!w(this).is(":disabled")&&At.test(this.nodeName)&&!Nt.test(e)&&(this.checked||!pe.test(e))}).map(function(e,t){var n=w(this).val();return null==n?null:Array.isArray(n)?w.map(n,function(e){return{name:t.name,value:e.replace(Dt,"\r\n")}}):{name:t.name,value:n.replace(Dt,"\r\n")}}).get()}});var qt=/%20/g,Lt=/#.*$/,Ht=/([?&])_=[^&]*/,Ot=/^(.*?):[ \t]*([^\r\n]*)$/gm,Pt=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Mt=/^(?:GET|HEAD)$/,Rt=/^\/\//,It={},Wt={},$t="*/".concat("*"),Bt=r.createElement("a");Bt.href=Ct.href;function Ft(e){return function(t,n){"string"!=typeof t&&(n=t,t="*");var r,i=0,o=t.toLowerCase().match(M)||[];if(g(n))while(r=o[i++])"+"===r[0]?(r=r.slice(1)||"*",(e[r]=e[r]||[]).unshift(n)):(e[r]=e[r]||[]).push(n)}}function _t(e,t,n,r){var i={},o=e===Wt;function a(s){var u;return i[s]=!0,w.each(e[s]||[],function(e,s){var l=s(t,n,r);return"string"!=typeof l||o||i[l]?o?!(u=l):void 0:(t.dataTypes.unshift(l),a(l),!1)}),u}return a(t.dataTypes[0])||!i["*"]&&a("*")}function zt(e,t){var n,r,i=w.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&w.extend(!0,e,r),e}function Xt(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}function Ut(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else 
try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}w.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Ct.href,type:"GET",isLocal:Pt.test(Ct.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":$t,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":w.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?zt(zt(e,w.ajaxSettings),t):zt(w.ajaxSettings,e)},ajaxPrefilter:Ft(It),ajaxTransport:Ft(Wt),ajax:function(t,n){"object"==typeof t&&(n=t,t=void 0),n=n||{};var i,o,a,s,u,l,c,f,p,d,h=w.ajaxSetup({},n),g=h.context||h,y=h.context&&(g.nodeType||g.jquery)?w(g):w.event,v=w.Deferred(),m=w.Callbacks("once memory"),x=h.statusCode||{},b={},T={},C="canceled",E={readyState:0,getResponseHeader:function(e){var t;if(c){if(!s){s={};while(t=Ot.exec(a))s[t[1].toLowerCase()]=t[2]}t=s[e.toLowerCase()]}return null==t?null:t},getAllResponseHeaders:function(){return c?a:null},setRequestHeader:function(e,t){return null==c&&(e=T[e.toLowerCase()]=T[e.toLowerCase()]||e,b[e]=t),this},overrideMimeType:function(e){return null==c&&(h.mimeType=e),this},statusCode:function(e){var t;if(e)if(c)E.always(e[E.status]);else for(t in e)x[t]=[x[t],e[t]];return this},abort:function(e){var t=e||C;return i&&i.abort(t),k(0,t),this}};if(v.promise(E),h.url=((t||h.url||Ct.href)+"").replace(Rt,Ct.protocol+"//"),h.type=n.method||n.type||h.method||h.type,h.dataTypes=(h.dataType||"*").toLowerCase().match(M)||[""],null==h.crossDomain){l=r.createElement("a");try{l.href=h.url,l.href=l.href,h.crossDomain=Bt.protocol+"//"+Bt.host!=l.protocol+"//"+l.host}catch(e){h.crossDomain=!0}}if(h.data&&h.processData&&"string"!=typeof h.data&&(h.data=w.param(h.data,h.traditional)),_t(It,h,n,E),c)return E;(f=w.event&&h.global)&&0==w.active++&&w.event.trigger("ajaxStart"),h.type=h.type.toUpperCase(),h.hasContent=!Mt.test(h.type),o=h.url.replace(Lt,""),h.hasContent?h.data&&h.processData&&0===(h.contentType||"").indexOf("application/x-www-form-urlencoded")&&(h.data=h.data.replace(qt,"+")):(d=h.url.slice(o.length),h.data&&(h.processData||"string"==typeof h.data)&&(o+=(kt.test(o)?"&":"?")+h.data,delete h.data),!1===h.cache&&(o=o.replace(Ht,"$1"),d=(kt.test(o)?"&":"?")+"_="+Et+++d),h.url=o+d),h.ifModified&&(w.lastModified[o]&&E.setRequestHeader("If-Modified-Since",w.lastModified[o]),w.etag[o]&&E.setRequestHeader("If-None-Match",w.etag[o])),(h.data&&h.hasContent&&!1!==h.contentType||n.contentType)&&E.setRequestHeader("Content-Type",h.contentType),E.setRequestHeader("Accept",h.dataTypes[0]&&h.accepts[h.dataTypes[0]]?h.accepts[h.dataTypes[0]]+("*"!==h.dataTypes[0]?", "+$t+"; q=0.01":""):h.accepts["*"]);for(p in h.headers)E.setRequestHeader(p,h.headers[p]);if(h.beforeSend&&(!1===h.beforeSend.call(g,E,h)||c))return E.abort();if(C="abort",m.add(h.complete),E.done(h.success),E.fail(h.error),i=_t(Wt,h,n,E)){if(E.readyState=1,f&&y.trigger("ajaxSend",[E,h]),c)return E;h.async&&h.timeout>0&&(u=e.setTimeout(function(){E.abort("timeout")},h.timeout));try{c=!1,i.send(b,k)}catch(e){if(c)throw e;k(-1,e)}}else k(-1,"No Transport");function k(t,n,r,s){var l,p,d,b,T,C=n;c||(c=!0,u&&e.clearTimeout(u),i=void 
0,a=s||"",E.readyState=t>0?4:0,l=t>=200&&t<300||304===t,r&&(b=Xt(h,E,r)),b=Ut(h,b,E,l),l?(h.ifModified&&((T=E.getResponseHeader("Last-Modified"))&&(w.lastModified[o]=T),(T=E.getResponseHeader("etag"))&&(w.etag[o]=T)),204===t||"HEAD"===h.type?C="nocontent":304===t?C="notmodified":(C=b.state,p=b.data,l=!(d=b.error))):(d=C,!t&&C||(C="error",t<0&&(t=0))),E.status=t,E.statusText=(n||C)+"",l?v.resolveWith(g,[p,C,E]):v.rejectWith(g,[E,C,d]),E.statusCode(x),x=void 0,f&&y.trigger(l?"ajaxSuccess":"ajaxError",[E,h,l?p:d]),m.fireWith(g,[E,C]),f&&(y.trigger("ajaxComplete",[E,h]),--w.active||w.event.trigger("ajaxStop")))}return E},getJSON:function(e,t,n){return w.get(e,t,n,"json")},getScript:function(e,t){return w.get(e,void 0,t,"script")}}),w.each(["get","post"],function(e,t){w[t]=function(e,n,r,i){return g(n)&&(i=i||r,r=n,n=void 0),w.ajax(w.extend({url:e,type:t,dataType:i,data:n,success:r},w.isPlainObject(e)&&e))}}),w._evalUrl=function(e){return w.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,"throws":!0})},w.fn.extend({wrapAll:function(e){var t;return this[0]&&(g(e)&&(e=e.call(this[0])),t=w(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(e){return g(e)?this.each(function(t){w(this).wrapInner(e.call(this,t))}):this.each(function(){var t=w(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=g(e);return this.each(function(n){w(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(e){return this.parent(e).not("body").each(function(){w(this).replaceWith(this.childNodes)}),this}}),w.expr.pseudos.hidden=function(e){return!w.expr.pseudos.visible(e)},w.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},w.ajaxSettings.xhr=function(){try{return new e.XMLHttpRequest}catch(e){}};var Vt={0:200,1223:204},Gt=w.ajaxSettings.xhr();h.cors=!!Gt&&"withCredentials"in Gt,h.ajax=Gt=!!Gt,w.ajaxTransport(function(t){var n,r;if(h.cors||Gt&&!t.crossDomain)return{send:function(i,o){var a,s=t.xhr();if(s.open(t.type,t.url,t.async,t.username,t.password),t.xhrFields)for(a in t.xhrFields)s[a]=t.xhrFields[a];t.mimeType&&s.overrideMimeType&&s.overrideMimeType(t.mimeType),t.crossDomain||i["X-Requested-With"]||(i["X-Requested-With"]="XMLHttpRequest");for(a in i)s.setRequestHeader(a,i[a]);n=function(e){return function(){n&&(n=r=s.onload=s.onerror=s.onabort=s.ontimeout=s.onreadystatechange=null,"abort"===e?s.abort():"error"===e?"number"!=typeof s.status?o(0,"error"):o(s.status,s.statusText):o(Vt[s.status]||s.status,s.statusText,"text"!==(s.responseType||"text")||"string"!=typeof s.responseText?{binary:s.response}:{text:s.responseText},s.getAllResponseHeaders()))}},s.onload=n(),r=s.onerror=s.ontimeout=n("error"),void 0!==s.onabort?s.onabort=r:s.onreadystatechange=function(){4===s.readyState&&e.setTimeout(function(){n&&r()})},n=n("abort");try{s.send(t.hasContent&&t.data||null)}catch(e){if(n)throw e}},abort:function(){n&&n()}}}),w.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),w.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return w.globalEval(e),e}}}),w.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),w.ajaxTransport("script",function(e){if(e.crossDomain){var 
t,n;return{send:function(i,o){t=w("