diff --git a/.forgejo/workflows/build-on-commit.yml b/.forgejo/workflows/build-on-commit.yml new file mode 100644 index 00000000..e8f0d2e3 --- /dev/null +++ b/.forgejo/workflows/build-on-commit.yml @@ -0,0 +1,40 @@ +name: Build Docker Image on Commit + +on: + push: + branches: + - main + tags: + - '!' # Exclude tags + +jobs: + build-and-publish: + runs-on: docker-builder + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set REPO_VARS + id: repo-url + run: | + echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV + echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV + + - name: Login to OCI registry + run: | + echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin + + - name: Build and push Docker images + run: | + # Build Docker image with commit SHA + docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} . + docker push $REPO_HOST/$REPO_PATH:${{ github.sha }} + + # Build Docker image with nightly tag + docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly + docker push $REPO_HOST/$REPO_PATH:nightly + + # Remove local images to save storage + docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }} + docker rmi $REPO_HOST/$REPO_PATH:nightly diff --git a/.forgejo/workflows/build-on-tag.yml b/.forgejo/workflows/build-on-tag.yml new file mode 100644 index 00000000..888102b6 --- /dev/null +++ b/.forgejo/workflows/build-on-tag.yml @@ -0,0 +1,37 @@ +name: Build and Publish Docker Image on Tag + +on: + push: + tags: + - '*' + +jobs: + build-and-publish: + runs-on: docker-builder + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set REPO_VARS + id: repo-url + run: | + echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV + echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV + + - name: Login to OCI registry + run: | + echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin + + - name: Build and push Docker image + run: | + TAG=${{ github.ref_name }} # Get the tag name from the context + # Build and push multi-platform Docker images + docker build -t $REPO_HOST/$REPO_PATH:$TAG --push . 
+ # Tag and push latest + docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest + docker push $REPO_HOST/$REPO_PATH:latest + + # Remove the local image to save storage + docker rmi $REPO_HOST/$REPO_PATH:$TAG + docker rmi $REPO_HOST/$REPO_PATH:latest \ No newline at end of file diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index db8c40a5..aab991d5 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1 +1,10 @@ -custom: https://zeronet.io/docs/help_zeronet/donate/ +github: canewsin +patreon: # Replace with a single Patreon username e.g., user1 +open_collective: # Replace with a single Open Collective username e.g., user1 +ko_fi: canewsin +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: canewsin +issuehunt: # Replace with a single IssueHunt username e.g., user1 +otechie: # Replace with a single Otechie username e.g., user1 +custom: ['https://paypal.me/PramUkesh', 'https://zerolink.ml/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/'] diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..27b5c924 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,72 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ py3-latest ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ py3-latest ] + schedule: + - cron: '32 19 * * 2' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'javascript', 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. 
+ # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..2bdcaf95 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,51 @@ +name: tests + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-20.04 + strategy: + max-parallel: 16 + matrix: + python-version: ["3.7", "3.8", "3.9"] + + steps: + - name: Checkout ZeroNet + uses: actions/checkout@v2 + with: + submodules: "true" + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Prepare for installation + run: | + python3 -m pip install setuptools + python3 -m pip install --upgrade pip wheel + python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium + + - name: Install + run: | + python3 -m pip install --upgrade -r requirements.txt + python3 -m pip list + + - name: Prepare for tests + run: | + openssl version -a + echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6 + + - name: Test + run: | + catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini + export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test + export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test + export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test + export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test + export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test + export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test + find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" + find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" + flake8 . 
--count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ diff --git a/.gitignore b/.gitignore index 057c422a..636cd115 100644 --- a/.gitignore +++ b/.gitignore @@ -7,9 +7,14 @@ __pycache__/ # Hidden files .* +!/.forgejo +!/.github !/.gitignore !/.travis.yml +!/.gitlab-ci.yml +# Temporary files +*.bak # Data dir data/* @@ -26,3 +31,6 @@ tools/phantomjs # ZeroNet config file zeronet.conf + +# ZeroNet log files +log/* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000..f3e1ed29 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,48 @@ +stages: + - test + +.test_template: &test_template + stage: test + before_script: + - pip install --upgrade pip wheel + # Selenium and requests can't be installed without a requests hint on Python 3.4 + - pip install --upgrade requests>=2.22.0 + - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium + - pip install --upgrade -r requirements.txt + script: + - pip list + - openssl version -a + - python -m pytest -x plugins/CryptMessage/Test --color=yes + - python -m pytest -x plugins/Bigfile/Test --color=yes + - python -m pytest -x plugins/AnnounceLocal/Test --color=yes + - python -m pytest -x plugins/OptionalManager/Test --color=yes + - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini --color=yes + - mv plugins/disabled-Multiuser plugins/Multiuser + - python -m pytest -x plugins/Multiuser/Test --color=yes + - mv plugins/disabled-Bootstrapper plugins/Bootstrapper + - python -m pytest -x plugins/Bootstrapper/Test --color=yes + - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ + +test:py3.4: + image: python:3.4.3 + <<: *test_template + +test:py3.5: + image: python:3.5.7 + <<: *test_template + +test:py3.6: + image: python:3.6.9 + <<: *test_template + +test:py3.7-openssl1.1.0: + image: python:3.7.0b5 + <<: *test_template + +test:py3.7-openssl1.1.1: + image: python:3.7.4 + <<: *test_template + +test:py3.8: + image: python:3.8.0b3 + <<: *test_template \ No newline at end of file diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..2c602a5a --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "plugins"] + path = plugins + url = https://github.com/ZeroNetX/ZeroNet-Plugins.git diff --git a/.travis.yml b/.travis.yml index 9f214a3f..bdaafa22 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,11 +1,20 @@ language: python python: - - 2.7 + - 3.4 + - 3.5 + - 3.6 + - 3.7 + - 3.8 services: - docker +cache: pip +before_install: + - pip install --upgrade pip wheel + - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium + # - docker build -t zeronet . + # - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet install: - - pip install -U pip wheel - - pip install -r requirements.txt + - pip install --upgrade -r requirements.txt - pip list before_script: - openssl version -a @@ -15,23 +24,22 @@ before_script: sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6'; fi script: - - python -m pytest -x plugins/CryptMessage/Test - - python -m pytest -x plugins/Bigfile/Test - - python -m pytest -x plugins/AnnounceLocal/Test - - python -m pytest -x plugins/OptionalManager/Test - - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini -before_install: - - pip install -U pytest mock pytest-cov selenium - - pip install codecov - - pip install coveralls - - docker build -t zeronet . 
- - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet + - catchsegv python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini + - export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python -m pytest -x plugins/CryptMessage/Test + - export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python -m pytest -x plugins/Bigfile/Test + - export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python -m pytest -x plugins/AnnounceLocal/Test + - export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python -m pytest -x plugins/OptionalManager/Test + - export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test + - export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test + - find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" + - find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" + - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ +after_failure: + - zip -r log.zip log/ + - curl --upload-file ./log.zip https://transfer.sh/log.zip after_success: - codecov - coveralls --rcfile=src/Test/coverage.ini -cache: - directories: - - $HOME/.cache/pip notifications: email: recipients: diff --git a/CHANGELOG-zh-cn.md b/CHANGELOG-zh-cn.md deleted file mode 100644 index c09ca401..00000000 --- a/CHANGELOG-zh-cn.md +++ /dev/null @@ -1,134 +0,0 @@ -## ZeroNet 0.5.1 (2016-11-18) -### 新增 -- 多语言界面 -- 新插件:为站点 HTML 与 JS 文件提供的翻译助手 -- 每个站点独立的 favicon - -### 修复 -- 并行可选文件下载 - -## ZeroNet 0.5.0 (2016-11-08) -### 新增 -- 新插件:允许在 ZeroHello 列出/删除/固定/管理文件 -- 新的 API 命令来关注用户的可选文件,与可选文件的请求统计 -- 新的可选文件总大小限制 -- 新插件:保存节点到数据库并在重启时保持它们,使得更快的可选文件搜索以及在没有 Tracker 的情况下工作 -- 重写 UPnP 端口打开器 + 退出时关闭端口(感谢 sirMackk!) -- 通过懒惰 PeerHashfield 创建来减少内存占用 -- 在 /Stats 页面加载 JSON 文件统计与数据库信息 - -### 更改 -- 独立的锁定文件来获得更好的 Windows 兼容性 -- 当执行 start.py 时,即使 ZeroNet 已经运行也打开浏览器 -- 在重载时保持插件顺序来允许插件扩展另一个插件 -- 只在完整加载 sites.json 时保存来避免数据丢失 -- 将更多的 Tracker 更改为更可靠的 Tracker -- 更少的 findhashid CPU 使用率 -- 合并下载大量可选文件 -- 更多对于可选文件的其他优化 -- 如果一个站点有 1000 个节点,更积极地清理 -- 为验证错误使用警告而不是错误 -- 首先推送更新到更新的客户端 -- 损坏文件重置改进 - -### 修复 -- 修复启动时出现的站点删除错误 -- 延迟 WebSocket 消息直到连接上 -- 修复如果文件包含额外数据时的数据库导入 -- 修复大站点下载 -- 修复 diff 发送 bug (跟踪它好长时间了) -- 修复当 JSON 文件包含 [] 字符时随机出现的发布错误 -- 修复 siteDelete 与 siteCreate bug -- 修复文件写入确认对话框 - - -## ZeroNet 0.4.1 (2016-09-05) -### 新增 -- 更快启动与更少内存使用的内核改变 -- 尝试连接丢失时重新连接 Tor -- 侧边栏滑入 -- 尝试避免不完整的数据文件被覆盖 -- 更快地打开数据库 -- 在侧边栏显示用户文件大小 -- 依赖 --connection_limit 的并发 worker 数量 - - -### 更改 -- 在空闲 5 分钟后关闭数据库 -- 更好的站点大小计算 -- 允许在域名中使用“-”符号 -- 总是尝试为站点保持连接 -- 移除已合并站点的合并权限 -- 只扫描最后 3 天的新闻源来加快数据库请求 -- 更新 ZeroBundle-win 到 Python 2.7.12 - - -### 修复 -- 修复重要的安全问题:允许任意用户无需有效的来自 ID 提供者的证书发布新内容,感谢 Kaffie 指出 -- 修复在没有选择提供证书提供者时的侧边栏错误 -- 在数据库重建时跳过无效文件 -- 修复随机弹出的 WebSocket 连接错误 -- 修复新的 siteCreate 命令 -- 修复站点大小计算 -- 修复计算机唤醒后的端口打开检查 -- 修复 --size_limit 的命令行解析 - - -## ZeroNet 0.4.0 (2016-08-11) -### 新增 -- 合并站点插件 -- Live source code reloading: Faster core development by allowing me to make changes in ZeroNet source code without restarting it. 
-- 为合并站点设计的新 JSON 表
-- 从侧边栏重建数据库
-- 允许直接在 JSON 表中存储自定义数据:更简单与快速的 SQL 查询
-- 用户文件存档:允许站点拥有者存档不活跃的用户内容到单个文件(减少初始同步的时间/CPU/内存使用率)
-- 在文件删除时同时触发数据库 onUpdated/update
-- 从 ZeroFrame API 请求权限
-- 允许使用 fileWrite API 命令在 content.json 存储额外数据
-- 更快的可选文件下载
-- 使用替代源 (Gogs, Gitlab) 来下载更新
-- Track provided sites/connection and prefer to keep the ones with more sites to reduce connection number
-
-### 更改
-- 保持每个站点至少 5 个连接
-- 将目标站点连接从 10 更改到 15
-- ZeroHello 搜索功能稳定性/速度改进
-- 提升机械硬盘下的客户端性能
-
-### 修复
-- 修复 IE11 wrapper nonce 错误
-- 修复在移动设备上的侧边栏
-- 修复站点大小计算
-- 修复 IE10 兼容性
-- Windows XP ZeroBundle 兼容性(感谢中国人民)
-
-
-## ZeroNet 0.3.7 (2016-05-27)
-### 更改
-- 通过只传输补丁来减少带宽使用
-- 其他 CPU /内存优化
-
-
-## ZeroNet 0.3.6 (2016-05-27)
-### 新增
-- 新的 ZeroHello
-- Newsfeed 函数
-
-### 修复
-- 安全性修复
-
-
-## ZeroNet 0.3.5 (2016-02-02)
-### 新增
-- 带有 .onion 隐藏服务的完整 Tor 支持
-- 使用 ZeroNet 协议的 Bootstrap
-
-### 修复
-- 修复 Gevent 1.0.2 兼容性
-
-
-## ZeroNet 0.3.4 (2015-12-28)
-### 新增
-- AES, ECIES API 函数支持
-- PushState 与 ReplaceState URL 通过 API 的操作支持
-- 多用户 localstorage
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 225e424a..6974d18a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,201 @@
+### ZeroNet 0.9.0 (2023-07-12) Rev4630
+ - Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9
+ - Add trackers to Config.py as a failsafe in case trackers.txt is missing
+ - Added Proxy links
+ - Fix pysha3 dep installation issue
+ - FileRequest -> Remove Unnecessary check, Fix error wording
+ - Fix Response when site is missing for `actionAs`
+
+
+### ZeroNet 0.8.5 (2023-02-12) Rev4625
+ - Fix (https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert gen failing on Windows.
+ - Default theme-class for missing value in `users.json`.
+ - Fetch Stats Plugin changes.
+
+### ZeroNet 0.8.4 (2022-12-12) Rev4620
+ - Increase Minimum Site size to 25MB.
+
+### ZeroNet 0.8.3 (2022-12-11) Rev4611
+ - main.py -> Fix accessing unassigned variable
+ - ContentManager -> Support for multiSig
+ - SiteStorage.py -> Fix accessing unassigned variable
+ - ContentManager.py Improve Logging of Valid Signers
+
+### ZeroNet 0.8.2 (2022-11-01) Rev4610
+ - Fix Startup Error when plugins dir is missing
+ - Move trackers to separate file & Add more trackers
+ - Config:: Skip loading missing tracker files
+ - Added documentation for getRandomPort fn
+
+### ZeroNet 0.8.1 (2022-10-01) Rev4600
+ - Fix readdress loop (cherry-pick previously added commit from conservancy)
+ - Remove Patreon badge
+ - Update README-ru.md (#177)
+ - Include inner_path of failed request for signing in error msg and response
+ - Don't Fail Silently When Cert is Not Selected
+ - Console Log Updates, Specify min supported ZeroNet version for Rust version Protocol Compatibility
+ - Update FUNDING.yml
+
+### ZeroNet 0.8.0 (2022-05-27) Rev4591
+ - Revert File Open to catch File Access Errors.
+
+### ZeroNet 0.7.9-patch (2022-05-26) Rev4586
+ - Use xescape(s) from zeronet-conservancy
+ - actionUpdate response Optimisation
+ - Fetch Plugins Repo Updates
+ - Fix Unhandled File Access Errors
+ - Create codeql-analysis.yml
+
+### ZeroNet 0.7.9 (2022-05-26) Rev4585
+ - Rust Version Compatibility for update Protocol msg
+ - Removed Non Working Trackers.
+ - Dynamically Load Trackers from Dashboard Site.
+ - Tracker Supply Improvements.
+ - Fix Repo Url for Bug Report
+ - First Party Tracker Update Service using Dashboard Site.
+ - remove old v2 onion service [#158](https://github.com/ZeroNetX/ZeroNet/pull/158)
+
+### ZeroNet 0.7.8 (2022-03-02) Rev4580
+ - Update Plugins with some bug fixes and Improvements
+
+### ZeroNet 0.7.6 (2022-01-12) Rev4565
+ - Sync Plugin Updates
+ - Clean up tor v3 patch [#115](https://github.com/ZeroNetX/ZeroNet/pull/115)
+ - Add More Default Plugins to Repo
+ - Doubled Site Publish Limits
+ - Update ZeroNet Repo Urls [#103](https://github.com/ZeroNetX/ZeroNet/pull/103)
+ - UI/UX: Increases Size of Notifications Close Button [#106](https://github.com/ZeroNetX/ZeroNet/pull/106)
+ - Moved Plugins to Separate Repo
+ - Added `access_key` variable in Config; this is used to access restricted plugins when the multiuser plugin is enabled. When MultiUserPlugin is enabled we cannot access some pages like /Stats; this key removes that restriction.
+ - Added `last_connection_id_current_version` to ConnectionServer, helpful to estimate the number of connections from the current client version.
+ - Added current version: connections to /Stats page. See the previous point.
+
+### ZeroNet 0.7.5 (2021-11-28) Rev4560
+ - Add more default trackers
+ - Change default homepage address to `1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`
+ - Change default update site address to `1Update8crprmciJHwp2WXqkx2c4iYp18`
+
+### ZeroNet 0.7.3 (2021-11-28) Rev4555
+ - Fix xrange is undefined error
+ - Fix Incorrect viewport on mobile while loading
+ - Tor-V3 Patch by anonymoose
+
+
+### ZeroNet 0.7.1 (2019-07-01) Rev4206
+### Added
+ - Built-in logging console in the web UI to see what's happening in the background. (pull down top-right 0 button to see it)
+ - Display database rebuild errors [Thanks to Lola]
+ - New plugin system that allows installing and managing builtin/third party extensions to the ZeroNet client using the web interface.
+ - Support multiple trackers_file
+ - Add OpenSSL 1.1 support to CryptMessage plugin based on Bitmessage modifications [Thanks to radfish]
+ - Display visual error message on startup errors
+ - Fix max opened files changing on Windows platform
+ - Display TLS1.3 compatibility on /Stats page
+ - Add fake SNI and ALPN to peer connections to make it more like standard https connections
+ - Hide and ignore tracker_proxy setting in Tor: Always mode as it's going to use Tor anyway.
+ - Deny websocket connections from unknown origins
+ - Restrict open_browser values to avoid RCE on sandbox escape
+ - Offer access to the web interface by IP address in case of unknown host
+ - Link to site's sidebar with "#ZeroNet:OpenSidebar" hash
+
+### Changed
+ - Allow .. in file names [Thanks to imachug]
+ - Change unstable trackers
+ - More clean errors on sites.json/users.json load error
+ - Various tweaks for tracker rating on unstable connections
+ - Use OpenSSL 1.1 dlls from default Python Windows distribution if possible
+ - Re-factor domain resolving for easier domain plugins
+ - Disable UDP connections if --proxy is used
+ - New, decorator-based Websocket API permission system to avoid future typo mistakes
+
+### Fixed
+ - Fix parsing config lines that have no value
+ - Fix start.py [Thanks to imachug]
+ - Allow multiple values of the same key in the config file [Thanks ssdifnskdjfnsdjk for reporting]
+ - Fix parsing config file lines that have % in the value [Thanks slrslr for reporting]
+ - Fix bootstrapper plugin hash reloads [Thanks geekless for reporting]
+ - Fix CryptMessage plugin OpenSSL dll loading on Windows (ZeroMail errors) [Thanks cxgreat2014 for reporting]
+ - Fix startup error when using OpenSSL 1.1 [Thanks to imachug]
+ - Fix a bug that did not load merged site data for 5 sec after the merged site got added
+ - Fix typo that allowed adding new plugins in public proxy mode. [Thanks styromaniac for reporting]
+ - Fix loading non-big files with "|all" postfix [Thanks to krzotr]
+ - Fix OpenSSL cert generation error crash by changing Windows console encoding to utf8
+
+#### Wrapper html injection vulnerability [Reported by ivanq]
+
+In ZeroNet before rev4188 the wrapper template variables were rendered incorrectly.
+
+Result: The opened site was able to gain a WebSocket connection with unrestricted ADMIN/NOSANDBOX access, change configuration values and possibly achieve RCE on the client's machine.
+
+Fix: Fixed the template rendering code, disallowed WebSocket connections from unknown locations, restricted open_browser configuration values to avoid possible RCE in case of sandbox escape.
+
+Note: The fix is also backported to the ZeroNet Py 2.x version (Rev3870)
+
+
+### ZeroNet 0.7.0 (2019-06-12) Rev4106 (First release targeting Python 3.4+)
+### Added
+ - 5-10x faster signature verification by using libsecp256k1 (Thanks to ZeroMux)
+ - Generated SSL certificate randomization to avoid protocol filters (Thanks to ValdikSS)
+ - Offline mode
+ - P2P source code update using ZeroNet protocol
+ - ecdsaSign/Verify commands to CryptMessage plugin (Thanks to imachug)
+ - Efficient file rename: change file names instead of re-downloading the file.
+ - Make redirect optional on site cloning (Thanks to Lola)
+ - EccPrivToPub / EccPubToPriv functions (Thanks to imachug)
+ - Detect and change dark/light theme based on OS setting (Thanks to filips123)
+
+### Changed
+ - Re-factored code to Python3 runtime (compatible with Python 3.4-3.8)
+ - Safer database sync mode
+ - Removed bundled third-party libraries where possible
+ - Use lang=en instead of lang={lang} in urls to avoid url encode problems
+ - Remove environment details from error page
+ - Don't push content.json updates larger than 10kb to significantly reduce bw usage for sites with many files
+
+### Fixed
+ - Fix sending files with \0 characters
+ - Security fix: Escape error detail to avoid XSS (reported by krzotr)
+ - Fix signature verification using libsecp256k1 for compressed addresses (mostly certificates generated in the browser)
+ - Fix newsfeed if you have more than 1000 followed topics/posts on one site.
+ - Fix site download as zip file
+ - Fix displaying sites with utf8 title
+ - Error message if dbRebuild fails (Thanks to Lola)
+ - Fix browser reopen if executing start.py again. (Thanks to imachug)
+
+
+### ZeroNet 0.6.5 (2019-02-16) Rev3851 (Last release targeting Python 2.7.x)
+### Added
+ - IPv6 support in peer exchange, bigfiles, optional file finding, tracker sharing, socket listening and connecting (based on tangdou1 modifications)
+ - New tracker database format with IPv6 support
+ - Display notification if there is an unpublished modification for your site
+ - Listen and shut down normally for SIGTERM (Thanks to blurHY)
+ - Support tilde `~` in filenames (by d14na)
+ - Support map for Namecoin subdomain names (Thanks to lola)
+ - Add log level to config page
+ - Support `{data}` for data dir variable in trackers_file value
+ - Quick check content.db on startup and rebuild if necessary
+ - Don't show meek proxy option if the tor client does not support it
+
+### Changed
+ - Refactored port open checking with IPv6 support
+ - Consider non-local IPs as external even if the open port check fails (for CJDNS and Yggdrasil support)
+ - Add IPv6 tracker and change unstable tracker
+ - Don't correct sent local time with the calculated time correction
+ - Disable CSP for Edge
+ - Only support CREATE commands in dbschema indexes node and SELECT from storage.query
+
+### Fixed
+ - Check the length of master seed when executing cryptGetPrivatekey CLI command
+ - Only reload source code on file modification / creation
+ - Detect and issue warning for latest no-script plugin
+ - Fix atomic write of a non-existent file
+ - Fix sql queries with lots of variables and sites with lots of content.json
+ - Fix multi-line parsing of zeronet.conf
+ - Fix site deletion from users.json
+ - Fix site cloning before site downloaded (Reported by unsystemizer)
+ - Fix queryJson for non-list nodes (Reported by MingchenZhang)
+
+
 ## ZeroNet 0.6.4 (2018-10-20) Rev3660
 ### Added
  - New plugin: UiConfig. A web interface that allows changing ZeroNet settings.
diff --git a/src/lib/pyelliptic/LICENSE b/COPYING
similarity index 99%
rename from src/lib/pyelliptic/LICENSE
rename to COPYING
index 94a9ed02..f288702d 100644
--- a/src/lib/pyelliptic/LICENSE
+++ b/COPYING
@@ -1,7 +1,7 @@
                     GNU GENERAL PUBLIC LICENSE
                        Version 3, 29 June 2007
 
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
  Everyone is permitted to copy and distribute verbatim copies
  of this license document, but changing it is not allowed.
 
@@ -645,7 +645,7 @@ the "copyright" line and a pointer to where the full notice is found.
     GNU General Public License for more details.
 
     You should have received a copy of the GNU General Public License
-    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 Also add information on how to contact you by electronic and paper mail.
 
@@ -664,11 +664,11 @@ might be different; for a GUI interface, you would use an "about box".
 You should also get your employer (if you work as a programmer) or school,
 if any, to sign a "copyright disclaimer" for the program, if necessary.
 For more information on this, and how to apply and follow the GNU GPL, see
-<http://www.gnu.org/licenses/>.
+<https://www.gnu.org/licenses/>.
 
   The GNU General Public License does not permit incorporating your program
 into proprietary programs.  If your program is a subroutine library, you
 may consider it more useful to permit linking proprietary applications with
 the library.  If this is what you want to do, use the GNU Lesser General
 Public License instead of this License.  But first, please read
-<http://www.gnu.org/philosophy/why-not-lgpl.html>.
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/Dockerfile b/Dockerfile index 7fcd83ca..3f1d3c18 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,26 +1,33 @@ -FROM alpine:3.8 +FROM alpine:3.15 #Base settings ENV HOME /root +COPY requirements.txt /root/requirements.txt + #Install ZeroNet -RUN apk --no-cache --no-progress add musl-dev gcc python python-dev py2-pip tor openssl \ - && pip install --no-cache-dir gevent msgpack \ - && apk del musl-dev gcc python-dev py2-pip \ +RUN apk --update --no-cache --no-progress add python3 python3-dev py3-pip gcc g++ autoconf automake libtool libffi-dev musl-dev make tor openssl \ + && pip3 install -r /root/requirements.txt \ + && apk del python3-dev gcc g++ autoconf automake libtool libffi-dev musl-dev make \ && echo "ControlPort 9051" >> /etc/tor/torrc \ && echo "CookieAuthentication 1" >> /etc/tor/torrc + +RUN python3 -V \ + && python3 -m pip list \ + && tor --version \ + && openssl version #Add Zeronet source COPY . /root VOLUME /root/data #Control if Tor proxy is started -ENV ENABLE_TOR false +ENV ENABLE_TOR true WORKDIR /root #Set upstart command -CMD (! ${ENABLE_TOR} || tor&) && python zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552 +CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26117 #Expose ports -EXPOSE 43110 26552 +EXPOSE 43110 26117 diff --git a/Dockerfile.arm64v8 b/Dockerfile.arm64v8 new file mode 100644 index 00000000..d27b7620 --- /dev/null +++ b/Dockerfile.arm64v8 @@ -0,0 +1,34 @@ +FROM alpine:3.12 + +#Base settings +ENV HOME /root + +COPY requirements.txt /root/requirements.txt + +#Install ZeroNet +RUN apk --update --no-cache --no-progress add python3 python3-dev gcc libffi-dev musl-dev make tor openssl \ + && pip3 install -r /root/requirements.txt \ + && apk del python3-dev gcc libffi-dev musl-dev make \ + && echo "ControlPort 9051" >> /etc/tor/torrc \ + && echo "CookieAuthentication 1" >> /etc/tor/torrc + +RUN python3 -V \ + && python3 -m pip list \ + && tor --version \ + && openssl version + +#Add Zeronet source +COPY . /root +VOLUME /root/data + +#Control if Tor proxy is started +ENV ENABLE_TOR false + +WORKDIR /root + +#Set upstart command +CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552 + +#Expose ports +EXPOSE 43110 26552 + diff --git a/LICENSE b/LICENSE index d6a93266..0d17b72d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,340 +1,27 @@ -GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your -freedom to share and change it. By contrast, the GNU General Public -License is intended to guarantee your freedom to share and change free -software--to make sure the software is free for all its users. This -General Public License applies to most of the Free Software -Foundation's software and to any other program whose authors commit to -using it. (Some other Free Software Foundation software is covered by -the GNU Lesser General Public License instead.) You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. 
Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -this service if you wish), that you receive source code or can get it -if you want it, that you can change the software or use pieces of it -in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid -anyone to deny you these rights or to ask you to surrender the rights. -These restrictions translate to certain responsibilities for you if you -distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must give the recipients all the rights that -you have. You must make sure that they, too, receive or can get the -source code. And you must show them these terms so they know their -rights. - - We protect your rights with two steps: (1) copyright the software, and -(2) offer you this license which gives you legal permission to copy, -distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain -that everyone understands that there is no warranty for this free -software. If the software is modified by someone else and passed on, we -want its recipients to know that what they have is not the original, so -that any problems introduced by others will not reflect on the original -authors' reputations. - - Finally, any free program is threatened constantly by software -patents. We wish to avoid the danger that redistributors of a free -program will individually obtain patent licenses, in effect making the -program proprietary. To prevent this, we have made it clear that any -patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and -modification follow. - - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains -a notice placed by the copyright holder saying it may be distributed -under the terms of this General Public License. The "Program", below, -refers to any such program or work, and a "work based on the Program" -means either the Program or any derivative work under copyright law: -that is to say, a work containing the Program or a portion of it, -either verbatim or with modifications and/or translated into another -language. (Hereinafter, translation is included without limitation in -the term "modification".) Each licensee is addressed as "you". - -Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running the Program is not restricted, and the output from the Program -is covered only if its contents constitute a work based on the -Program (independent of having been made by running the Program). -Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's -source code as you receive it, in any medium, provided that you -conspicuously and appropriately publish on each copy an appropriate -copyright notice and disclaimer of warranty; keep intact all the -notices that refer to this License and to the absence of any warranty; -and give any other recipients of the Program a copy of this License -along with the Program. 
- -You may charge a fee for the physical act of transferring a copy, and -you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion -of it, thus forming a work based on the Program, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Program, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Program, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Program. - -In addition, mere aggregation of another work not based on the Program -with the Program (or with a work based on the Program) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may copy and distribute the Program (or a work based on it, -under Section 2) in object code or executable form under the terms of -Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. 
(This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - -The source code for a work means the preferred form of the work for -making modifications to it. For an executable work, complete source -code means all the source code for all modules it contains, plus any -associated interface definition files, plus the scripts used to -control compilation and installation of the executable. However, as a -special exception, the source code distributed need not include -anything that is normally distributed (in either source or binary -form) with the major components (compiler, kernel, and so on) of the -operating system on which the executable runs, unless that component -itself accompanies the executable. - -If distribution of executable or object code is made by offering -access to copy from a designated place, then offering equivalent -access to copy the source code from the same place counts as -distribution of the source code, even though third parties are not -compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program -except as expressly provided under this License. Any attempt -otherwise to copy, modify, sublicense or distribute the Program is -void, and will automatically terminate your rights under this License. -However, parties who have received copies, or rights, from you under -this License will not have their licenses terminated so long as such -parties remain in full compliance. - - 5. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Program or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Program (or any work based on the -Program), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the -Program), the recipient automatically receives a license from the -original licensor to copy, distribute or modify the Program subject to -these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties to -this License. - - 7. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Program at all. For example, if a patent -license would not permit royalty-free redistribution of the Program by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Program. 
- -If any portion of this section is held invalid or unenforceable under -any particular circumstance, the balance of the section is intended to -apply and the section as a whole is intended to apply in other -circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system, which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Program under this License -may add an explicit geographical distribution limitation excluding -those countries, so that distribution is permitted only in or among -countries not thus excluded. In such case, this License incorporates -the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions -of the General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - -Each version is given a distinguishing version number. If the Program -specifies a version number of this License which applies to it and "any -later version", you have the option of following the terms and conditions -either of that version or of any later version published by the Free -Software Foundation. If the Program does not specify a version number of -this License, you may choose any version ever published by the Free Software -Foundation. - - 10. If you wish to incorporate parts of the Program into other free -programs whose distribution conditions are different, write to the author -to ask for permission. For software which is copyrighted by the Free -Software Foundation, write to the Free Software Foundation; we sometimes -make exceptions for this. Our decision will be guided by the two goals -of preserving the free status of all derivatives of our free software and -of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY -FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN -OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES -PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED -OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS -TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE -PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, -REPAIR OR CORRECTION. - - 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR -REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, -INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING -OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED -TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY -YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER -PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - {description} - Copyright (C) {year} {fullname} - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License along - with this program; if not, write to the Free Software Foundation, Inc., - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -Also add information on how to contact you by electronic and paper mail. - -If the program is interactive, make it output a short notice like this -when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, the commands you use may -be called something other than `show w' and `show c'; they could even be -mouse-clicks or menu items--whatever suits your program. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the program, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program - `Gnomovision' (which makes passes at compilers) written by James Hacker. - - {signature of Ty Coon}, 1 April 1989 - Ty Coon, President of Vice - -This General Public License does not permit incorporating your program into -proprietary programs. If your program is a subroutine library, you may -consider it more useful to permit linking proprietary applications with the -library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. 
- +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, version 3. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program. If not, see . + + +Additional Conditions : + +Contributing to this repo + This repo is governed by GPLv3, same is located at the root of the ZeroNet git repo, + unless specified separately all code is governed by that license, contributions to this repo + are divided into two key types, key contributions and non-key contributions, key contributions + are which, directly affects the code performance, quality and features of software, + non key contributions include things like translation datasets, image, graphic or video + contributions that does not affect the main usability of software but improves the existing + usability of certain thing or feature, these also include tests written with code, since their + purpose is to check, whether something is working or not as intended. All the non-key contributions + are governed by [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/), unless specified + above, a contribution is ruled by the type of contribution if there is a conflict between two + contributing parties of repo in any case. diff --git a/README-ru.md b/README-ru.md index 75abbfab..7d557727 100644 --- a/README-ru.md +++ b/README-ru.md @@ -1,211 +1,133 @@ -# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) +# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) [简体中文](./README-zh-cn.md) [English](./README.md) -Децентрализованные вебсайты использующие Bitcoin криптографию и BitTorrent сеть - https://zeronet.io - +Децентрализованные вебсайты, использующие криптографию Bitcoin и протокол BitTorrent — https://zeronet.dev ([Зеркало в ZeroNet](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/)). В отличии от Bitcoin, ZeroNet'у не требуется блокчейн для работы, однако он использует ту же криптографию, чтобы обеспечить сохранность и проверку данных. ## Зачем? -* Мы верим в открытую, свободную, и не отцензуренную сеть и коммуникацию. -* Нет единой точки отказа: Сайт онлайн пока по крайней мере 1 пир обслуживает его. -* Никаких затрат на хостинг: Сайты обслуживаются посетителями. -* Невозможно отключить: Он нигде, потому что он везде. -* Быстр и работает оффлайн: Вы можете получить доступ к сайту, даже если Интернет недоступен. 
- +- Мы верим в открытую, свободную, и неподдающуюся цензуре сеть и связь. +- Нет единой точки отказа: Сайт остаётся онлайн, пока его обслуживает хотя бы 1 пир. +- Нет затрат на хостинг: Сайты обслуживаются посетителями. +- Невозможно отключить: Он нигде, потому что он везде. +- Скорость и возможность работать без Интернета: Вы сможете получить доступ к сайту, потому что его копия хранится на вашем компьютере и у ваших пиров. ## Особенности - * Обновляемые в реальном времени сайты - * Поддержка Namecoin .bit доменов - * Лёгок в установке: распаковал & запустил - * Клонирование вебсайтов в один клик - * Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - based authorization: Ваша учетная запись защищена той же криптографией, что и ваш Bitcoin-кошелек - * Встроенный SQL-сервер с синхронизацией данных P2P: Позволяет упростить разработку сайта и ускорить загрузку страницы - * Анонимность: Полная поддержка сети Tor с помощью скрытых служб .onion вместо адресов IPv4 - * TLS зашифрованные связи - * Автоматическое открытие uPnP порта - * Плагин для поддержки многопользовательской (openproxy) - * Работает с любыми браузерами и операционными системами +- Обновление сайтов в реальном времени +- Поддержка доменов `.bit` ([Namecoin](https://www.namecoin.org)) +- Легкая установка: просто распакуйте и запустите +- Клонирование сайтов "в один клик" +- Беспарольная [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) + авторизация: Ваша учетная запись защищена той же криптографией, что и ваш Bitcoin-кошелек +- Встроенный SQL-сервер с синхронизацией данных P2P: Позволяет упростить разработку сайта и ускорить загрузку страницы +- Анонимность: Полная поддержка сети Tor, используя скрытые службы `.onion` вместо адресов IPv4 +- Зашифрованное TLS подключение +- Автоматическое открытие UPnP–порта +- Плагин для поддержки нескольких пользователей (openproxy) +- Работа с любыми браузерами и операционными системами + +## Текущие ограничения + +- Файловые транзакции не сжаты +- Нет приватных сайтов ## Как это работает? -* После запуска `zeronet.py` вы сможете посетить зайты (zeronet сайты) используя адрес - `http://127.0.0.1:43110/{zeronet_address}` -(например. `http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`). -* Когда вы посещаете новый сайт zeronet, он пытается найти пиров с помощью BitTorrent - чтобы загрузить файлы сайтов (html, css, js ...) из них. -* Каждый посещенный зайт также обслуживается вами. (Т.е хранится у вас на компьютере) -* Каждый сайт содержит файл `content.json`, который содержит все остальные файлы в хэше sha512 - и подпись, созданную с использованием частного ключа сайта. -* Если владелец сайта (у которого есть закрытый ключ для адреса сайта) изменяет сайт, то он/она +- После запуска `zeronet.py` вы сможете посещать сайты в ZeroNet, используя адрес + `http://127.0.0.1:43110/{zeronet_адрес}` + (Например: `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). +- Когда вы посещаете новый сайт в ZeroNet, он пытается найти пиров с помощью протокола BitTorrent, + чтобы скачать у них файлы сайта (HTML, CSS, JS и т.д.). +- После посещения сайта вы тоже становитесь его пиром. +- Каждый сайт содержит файл `content.json`, который содержит SHA512 хеши всех остальные файлы + и подпись, созданную с помощью закрытого ключа сайта. +- Если владелец сайта (тот, кто владеет закрытым ключом для адреса сайта) изменяет сайт, он подписывает новый `content.json` и публикует его для пиров. 
После этого пиры проверяют целостность `content.json` - (используя подпись), они загружают измененные файлы и публикуют новый контент для других пиров. - -#### [Слайд-шоу о криптографии ZeroNet, обновлениях сайтов, многопользовательских сайтах »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [Часто задаваемые вопросы »](https://zeronet.io/docs/faq/) - -#### [Документация разработчика ZeroNet »](https://zeronet.io/docs/site_development/getting_started/) + (используя подпись), скачвают изменённые файлы и распространяют новый контент для других пиров. +[Презентация о криптографии ZeroNet, обновлениях сайтов, многопользовательских сайтах »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) +[Часто задаваемые вопросы »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) +[Документация разработчика ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## Скриншоты ![Screenshot](https://i.imgur.com/H60OAHY.png) ![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) +[Больше скриншотов в документации ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) -#### [Больше скриншотов в ZeroNet документации »](https://zeronet.io/docs/using_zeronet/sample_sites/) +## Как присоединиться? +### Windows -## Как вступить +- Скачайте и распакуйте архив [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26МБ) +- Запустите `ZeroNet.exe` -* Скачайте ZeroBundle пакет: - * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) - * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) - * [Linux 64-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) - * [Linux 32-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) -* Распакуйте где угодно -* Запустите `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) +### macOS -### Linux терминал +- Скачайте и распакуйте архив [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14МБ) +- Запустите `ZeroNet.app` -* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` -* `tar xvpfz ZeroBundle-linux64.tar.gz` -* `cd ZeroBundle` -* Запустите с помощью `./ZeroNet.sh` +### Linux (64 бит) -Он загружает последнюю версию ZeroNet, затем запускает её автоматически. +- Скачайте и распакуйте архив [ZeroNet-linux.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip) (14МБ) +- Запустите `./ZeroNet.sh` -#### Ручная установка для Debian Linux +> **Note** +> Запустите таким образом: `./ZeroNet.sh --ui_ip '*' --ui_restrict ваш_ip_адрес`, чтобы разрешить удалённое подключение к веб–интерфейсу. -* `sudo apt-get update` -* `sudo apt-get install msgpack-python python-gevent` -* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` -* `tar xvpfz master.tar.gz` -* `cd ZeroNet-master` -* Запустите с помощью `python2 zeronet.py` -* Откройте http://127.0.0.1:43110/ в вашем браузере. 
+### Docker -### [Arch Linux](https://www.archlinux.org) +Официальный образ находится здесь: https://hub.docker.com/r/canewsin/zeronet/ -* `git clone https://aur.archlinux.org/zeronet.git` -* `cd zeronet` -* `makepkg -srci` -* `systemctl start zeronet` -* Откройте http://127.0.0.1:43110/ в вашем браузере. +### Android (arm, arm64, x86) -Смотрите [ArchWiki](https://wiki.archlinux.org)'s [ZeroNet -article](https://wiki.archlinux.org/index.php/ZeroNet) для дальнейшей помощи. +- Для работы требуется Android как минимум версии 5.0 Lollipop +- [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) +- Скачать APK: https://github.com/canewsin/zeronet_mobile/releases -### [Gentoo Linux](https://www.gentoo.org) +### Android (arm, arm64, x86) Облегчённый клиент только для просмотра (1МБ) -* [`layman -a raiagent`](https://github.com/leycec/raiagent) -* `echo '>=net-vpn/zeronet-0.5.4' >> /etc/portage/package.accept_keywords` -* *(Опционально)* Включить поддержку Tor: `echo 'net-vpn/zeronet tor' >> - /etc/portage/package.use` -* `emerge zeronet` -* `rc-service zeronet start` -* Откройте http://127.0.0.1:43110/ в вашем браузере. +- Для работы требуется Android как минимум версии 4.1 Jelly Bean +- [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) -Смотрите `/usr/share/doc/zeronet-*/README.gentoo.bz2` для дальнейшей помощи. +### Установка из исходного кода -### [FreeBSD](https://www.freebsd.org/) - -* `pkg install zeronet` or `cd /usr/ports/security/zeronet/ && make install clean` -* `sysrc zeronet_enable="YES"` -* `service zeronet start` -* Откройте http://127.0.0.1:43110/ в вашем браузере. - -### [Vagrant](https://www.vagrantup.com/) - -* `vagrant up` -* Подключитесь к VM с помощью `vagrant ssh` -* `cd /vagrant` -* Запустите `python2 zeronet.py --ui_ip 0.0.0.0` -* Откройте http://127.0.0.1:43110/ в вашем браузере. - -### [Docker](https://www.docker.com/) -* `docker run -d -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` -* Это изображение Docker включает в себя прокси-сервер Tor, который по умолчанию отключён. - Остерегайтесь что некоторые хостинг-провайдеры могут не позволить вам запускать Tor на своих серверах. - Если вы хотите включить его,установите переменную среды `ENABLE_TOR` в` true` (по умолчанию: `false`) Например: - - `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` -* Откройте http://127.0.0.1:43110/ в вашем браузере. - -### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/) - -* `virtualenv env` -* `source env/bin/activate` -* `pip install msgpack gevent` -* `python2 zeronet.py` -* Откройте http://127.0.0.1:43110/ в вашем браузере. - -## Текущие ограничения - -* ~~Нет torrent-похожего файла разделения для поддержки больших файлов~~ (поддержка больших файлов добавлена) -* ~~Не анонимнее чем Bittorrent~~ (добавлена встроенная поддержка Tor) -* Файловые транзакции не сжаты ~~ или незашифрованы еще ~~ (добавлено шифрование TLS) -* Нет приватных сайтов - - -## Как я могу создать сайт в Zeronet? - -Завершите работу zeronet, если он запущен - -```bash -$ zeronet.py siteCreate -... -- Site private key (Приватный ключ сайта): 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq -- Site address (Адрес сайта): 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -... -- Site created! (Сайт создан) -$ zeronet.py -... 
+```sh +wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip +unzip ZeroNet-src.zip +cd ZeroNet +sudo apt-get update +sudo apt-get install python3-pip +sudo python3 -m pip install -r requirements.txt ``` +- Запустите `python3 zeronet.py` -Поздравляем, вы закончили! Теперь каждый может получить доступ к вашему зайту используя -`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` +Откройте приветственную страницу ZeroHello в вашем браузере по ссылке http://127.0.0.1:43110/ -Следующие шаги: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/) +## Как мне создать сайт в ZeroNet? +- Кликните на **⋮** > **"Create new, empty site"** в меню на сайте [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d). +- Вы будете **перенаправлены** на совершенно новый сайт, который может быть изменён только вами! +- Вы можете найти и изменить контент вашего сайта в каталоге **data/[адрес_вашего_сайта]** +- После изменений откройте ваш сайт, перетащите влево кнопку "0" в правом верхнем углу, затем нажмите кнопки **sign** и **publish** внизу -## Как я могу модифицировать Zeronet сайт? - -* Измените файлы расположенные в data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 директории. - Когда закончите с изменением: - -```bash -$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -- Signing site (Подпись сайта): 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2... -Private key (Приватный ключ) (input hidden): -``` - -* Введите секретный ключ, который вы получили при создании сайта, потом: - -```bash -$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -... -Site:13DNDk..bhC2 Publishing to 3/10 peers... -Site:13DNDk..bhC2 Successfuly published to 3 peers -- Serving files.... -``` - -* Вот и всё! Вы успешно подписали и опубликовали свои изменения. - +Следующие шаги: [Документация разработчика ZeroNet](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## Поддержите проект -- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX -- Paypal: https://zeronet.io/docs/help_zeronet/donate/ - -### Спонсоры - -* Улучшенная совместимость с MacOS / Safari стала возможной благодаря [BrowserStack.com](https://www.browserstack.com) +- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Рекомендуем) +- LiberaPay: https://liberapay.com/PramUkesh +- Paypal: https://paypal.me/PramUkesh +- Другие способы: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) #### Спасибо!
-* Больше информации, помощь, журнал изменений, zeronet сайты: https://www.reddit.com/r/zeronet/ -* Приходите, пообщайтесь с нами: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) или на [gitter](https://gitter.im/HelloZeroNet/ZeroNet) -* Email: hello@zeronet.io (PGP: CB9613AE) +- Здесь вы можете получить больше информации, помощь, прочитать список изменений и исследовать ZeroNet сайты: https://www.reddit.com/r/zeronetx/ +- Общение происходит на канале [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) или в [Gitter](https://gitter.im/canewsin/ZeroNet) +- Электронная почта: canews.in@gmail.com diff --git a/README-zh-cn.md b/README-zh-cn.md index 103194ea..37095ff6 100644 --- a/README-zh-cn.md +++ b/README-zh-cn.md @@ -1,51 +1,49 @@ -# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) +# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) [English](./README.md) -使用 Bitcoin 加密和 BitTorrent 网络的去中心化网络 - https://zeronet.io +使用 Bitcoin 加密和 BitTorrent 网络的去中心化网络 - https://zeronet.dev -## 为什么? +## 为什么? -* 我们相信开放,自由,无审查的网络 +* 我们相信开放,自由,无审查的网络和通讯 * 不会受单点故障影响:只要有在线的节点,站点就会保持在线 -* 无托管费用: 站点由访问者托管 -* 无法关闭: 因为节点无处不在 -* 快速并可离线运行: 即使没有互联网连接也可以使用 +* 无托管费用:站点由访问者托管 +* 无法关闭:因为节点无处不在 +* 快速并可离线运行:即使没有互联网连接也可以使用 ## 功能 * 实时站点更新 * 支持 Namecoin 的 .bit 域名 - * 安装方便: 只需解压并运行 + * 安装方便:只需解压并运行 * 一键克隆存在的站点 - * 无需密码、基于 [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) 的认证:用与比特币钱包相同的加密方法用来保护你的账户 -你的账户被使用和比特币钱包相同的加密方法 - * 内建 SQL 服务器和 P2P 数据同步: 让开发更简单并提升加载速度 - * 匿名性: 完整的 Tor 网络支持,支持通过 .onion 隐藏服务相互连接而不是通过IPv4地址连接 + * 无需密码、基于 [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) + 的认证:您的账户被与比特币钱包相同的加密方法保护 + * 内建 SQL 服务器和 P2P 数据同步:让开发更简单并提升加载速度 + * 匿名性:完整的 Tor 网络支持,支持通过 .onion 隐藏服务相互连接而不是通过 IPv4 地址连接 * TLS 加密连接 * 自动打开 uPnP 端口 - * 插件和多用户 (开放式代理) 支持 - * 全平台兼容 + * 多用户(openproxy)支持的插件 + * 适用于任何浏览器 / 操作系统 ## 原理 -* 在你运行`zeronet.py`后你将可以通过`http://127.0.0.1:43110/{zeronet_address}` (比如. -`http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`)。访问 zeronet 中的站点。 +* 在运行 `zeronet.py` 后,您将可以通过 + `http://127.0.0.1:43110/{zeronet_address}`(例如: + `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`)访问 zeronet 中的站点 +* 在您浏览 zeronet 站点时,客户端会尝试通过 BitTorrent 网络来寻找可用的节点,从而下载需要的文件(html,css,js...) +* 您将会储存每一个浏览过的站点 +* 每个站点都包含一个名为 `content.json` 的文件,它储存了其他所有文件的 sha512 散列值以及一个通过站点私钥生成的签名 +* 如果站点的所有者(拥有站点地址的私钥)修改了站点,并且他 / 她签名了新的 `content.json` 然后推送至其他节点, + 那么这些节点将会在使用签名验证 `content.json` 的真实性后,下载修改后的文件并将新内容推送至另外的节点 -* 在你浏览 zeronet 站点时,客户端会尝试通过 BitTorrent 网络来寻找可用的节点,从而下载需要的文件 (html, css, js...) 
+#### [关于 ZeroNet 加密,站点更新,多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) +#### [常见问题 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) -* 你将会储存每一个浏览过的站点 -* 每个站点都包含一个名为 `content.json` ,它储存了其他所有文件的 sha512 hash 值 - 和一个通过站点私钥建立的签名 -* 如果站点的所有者 (拥有私钥的那个人) 修改了站点, 并且他/她签名了新的 `content.json` 然后推送至其他节点, -那么所有节点将会在验证 `content.json` 的真实性 (使用签名)后, 下载修改后的文件并推送至其他节点。 - -#### [有关于 ZeroNet 加密, 站点更新, 多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1qBxkroB_iiX2zHEn0dt-N-qRZgyEzui46XS2hEa3AA4/pub?start=false&loop=false&delayms=3000) -#### [常见问题 »](https://zeronet.io/docs/faq/) - -#### [ZeroNet开发者文档 »](https://zeronet.io/docs/site_development/getting_started/) +#### [ZeroNet 开发者文档 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## 屏幕截图 @@ -53,136 +51,82 @@ ![Screenshot](https://i.imgur.com/H60OAHY.png) ![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -#### [在 ZeroNet 文档里查看更多的屏幕截图 »](https://zeronet.io/docs/using_zeronet/sample_sites/) +#### [ZeroNet 文档中的更多屏幕截图 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) -## 如何加入 ? +## 如何加入 -* 下载 ZeroBundle 文件包: - * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) - * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) - * [Linux 64bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) - * [Linux 32bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) -* 解压缩 -* 运行 `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) +### Windows -### Linux 命令行 + - 下载 [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) + - 在任意位置解压缩 + - 运行 `ZeroNet.exe` + +### macOS -* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` -* `tar xvpfz ZeroBundle-linux64.tar.gz` -* `cd ZeroBundle` -* 执行 `./ZeroNet.sh` 来启动 + - 下载 [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) + - 在任意位置解压缩 + - 运行 `ZeroNet.app` + +### Linux (x86-64bit) -在你打开时他将会自动下载最新版本的 ZeroNet 。 + - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` + - `unzip ZeroNet-linux.zip` + - `cd ZeroNet-linux` + - 使用以下命令启动 `./ZeroNet.sh` + - 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面 + + __提示:__ 若要允许在 Web 界面上的远程连接,使用以下命令启动 `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` -#### 在 Debian Linux 中手动安装 +### 从源代码安装 -* `sudo apt-get update` -* `sudo apt-get install msgpack-python python-gevent` -* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` -* `tar xvpfz master.tar.gz` -* `cd ZeroNet-master` -* 执行 `python2 zeronet.py` 来启动 -* 在你的浏览器中打开 http://127.0.0.1:43110/ + - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip` + - `unzip ZeroNet-src.zip` + - `cd ZeroNet` + - `sudo apt-get update` + - `sudo apt-get install python3-pip` + - `sudo python3 -m pip install -r requirements.txt` + - 使用以下命令启动 `python3 zeronet.py` + - 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面 -### [FreeBSD](https://www.freebsd.org/) + ### Android (arm, arm64, x86) + - minimum Android version supported 21 (Android 5.0 Lollipop) + - [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) + - APK download: 
https://github.com/canewsin/zeronet_mobile/releases -* `pkg install zeronet` 或者 `cd /usr/ports/security/zeronet/ && make install clean` -* `sysrc zeronet_enable="YES"` -* `service zeronet start` -* 在你的浏览器中打开 http://127.0.0.1:43110/ - -### [Vagrant](https://www.vagrantup.com/) - -* `vagrant up` -* 通过 `vagrant ssh` 连接到 VM -* `cd /vagrant` -* 运行 `python2 zeronet.py --ui_ip 0.0.0.0` -* 在你的浏览器中打开 http://127.0.0.1:43110/ - -### [Docker](https://www.docker.com/) -* `docker run -d -v :/root/data -p 26552:26552 -p 43110:43110 nofish/zeronet` -* 这个 Docker 镜像包含了 Tor ,但默认是禁用的,因为一些托管商不允许你在他们的服务器上运行 Tor。如果你希望启用它, -设置 `ENABLE_TOR` 环境变量为 `true` (默认: `false`). E.g.: - - `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 26552:26552 -p 43110:43110 nofish/zeronet` -* 在你的浏览器中打开 http://127.0.0.1:43110/ - -### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/) - -* `virtualenv env` -* `source env/bin/activate` -* `pip install msgpack gevent` -* `python2 zeronet.py` -* 在你的浏览器中打开 http://127.0.0.1:43110/ +### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) + - minimum Android version supported 16 (JellyBean) + - [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) ## 现有限制 -* ~~没有类似于 BitTorrent 的文件拆分来支持大文件~~ (已添加大文件支持) -* ~~没有比 BitTorrent 更好的匿名性~~ (已添加内置的完整 Tor 支持) -* 传输文件时没有压缩~~和加密~~ (已添加 TLS 支持) +* 传输文件时没有压缩 * 不支持私有站点 -## 如何创建一个 ZeroNet 站点? +## 如何创建一个 ZeroNet 站点? + * 点击 [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d) 站点的 **⋮** > **「新建空站点」** 菜单项 + * 您将被**重定向**到一个全新的站点,该站点只能由您修改 + * 您可以在 **data/[您的站点地址]** 目录中找到并修改网站的内容 + * 修改后打开您的网站,将右上角的「0」按钮拖到左侧,然后点击底部的**签名**并**发布**按钮 -如果 zeronet 在运行,把它关掉 -执行: -```bash -$ zeronet.py siteCreate -... -- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq -- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -... -- Site created! -$ zeronet.py -... -``` - -你已经完成了! 现在任何人都可以通过 -`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` -来访问你的站点 - -下一步: [ZeroNet 开发者文档](https://zeronet.io/docs/site_development/getting_started/) - - -## 我要如何修改 ZeroNet 站点? - -* 修改位于 data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 的目录. - 在你改好之后: - -```bash -$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2... -Private key (input hidden): -``` - -* 输入你在创建站点时获得的私钥 - -```bash -$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -... -Site:13DNDk..bhC2 Publishing to 3/10 peers... -Site:13DNDk..bhC2 Successfuly published to 3 peers -- Serving files.... -``` - -* 就是这样! 你现在已经成功的签名并推送了你的更改。 - +接下来的步骤:[ZeroNet 开发者文档](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## 帮助这个项目 +- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred) +- LiberaPay: https://liberapay.com/PramUkesh +- Paypal: https://paypal.me/PramUkesh +- Others: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) -- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX -- Paypal: https://zeronet.io/docs/help_zeronet/donate/ -### 赞助商 +#### 感谢您! -* 在 OSX/Safari 下 [BrowserStack.com](https://www.browserstack.com) 带来更好的兼容性 -#### 感谢!
- -* 更多信息, 帮助, 变更记录和 zeronet 站点: https://www.reddit.com/r/zeronet/ -* 在: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 和我们聊天,或者使用 [gitter](https://gitter.im/HelloZeroNet/ZeroNet) -* [这里](https://gitter.im/ZeroNet-zh/Lobby)是一个 gitter 上的中文聊天室 -* Email: hello@noloop.me +* 更多信息,帮助,变更记录和 zeronet 站点:https://www.reddit.com/r/zeronetx/ +* 前往 [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 或 [gitter](https://gitter.im/canewsin/ZeroNet) 和我们聊天 +* [这里](https://gitter.im/canewsin/ZeroNet)是一个 gitter 上的中文聊天室 +* Email: canews.in@gmail.com diff --git a/README.md b/README.md index 07d09ddb..70b79adc 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,6 @@ -# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) - -[简体中文](./README-zh-cn.md) -[Русский](./README-ru.md) - -Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.io +# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) + +Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.dev / [ZeroNet Site](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/). Unlike Bitcoin, ZeroNet doesn't need a blockchain to run; it uses the same cryptography as BTC to ensure data integrity and validation. ## Why? @@ -36,22 +33,22 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ * After starting `zeronet.py` you will be able to visit zeronet sites using `http://127.0.0.1:43110/{zeronet_address}` (eg. - `http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`). + `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). * When you visit a new zeronet site, it tries to find peers using the BitTorrent network so it can download the site files (html, css, js...) from them. * Each visited site is also served by you. * Every site contains a `content.json` file which holds all other files in a sha512 hash and a signature generated using the site's private key. * If the site owner (who has the private key for the site address) modifies the - site, then he/she signs the new `content.json` and publishes it to the peers. + site, they sign the new `content.json` and publish it to the peers. Afterwards, the peers verify the `content.json` integrity (using the signature), they download the modified files and publish the new content to other peers.
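The "How does it work?" list above boils down to this: every file is hashed with SHA512, the hashes are listed in `content.json`, and that manifest is signed with the site's private key. The sketch below is a minimal illustration of the hash-checking half of that idea only; the manifest layout shown is a simplified assumption, and the signature verification that the real client performs is deliberately left out.

```python
# Conceptual sketch only -- not ZeroNet's actual implementation.
# Re-hashes the files listed in a simplified content.json-style manifest
# and compares them against the recorded SHA512 digests.
import hashlib
import json
from pathlib import Path


def sha512_hex(path: Path) -> str:
    """Stream a file through SHA512 and return its hex digest."""
    digest = hashlib.sha512()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest()


def verify_site_files(site_dir: str) -> bool:
    """Check every file listed in content.json against its stored hash.

    Assumes a manifest shaped like {"files": {"index.html": {"sha512": "..."}}};
    the signature check over content.json itself is left out of this sketch.
    """
    site = Path(site_dir)
    manifest = json.loads((site / "content.json").read_text())
    ok = True
    for rel_path, info in manifest.get("files", {}).items():
        if sha512_hex(site / rel_path) != info.get("sha512"):
            print(f"hash mismatch: {rel_path}")
            ok = False
    return ok


if __name__ == "__main__":
    # Hypothetical local path; point it at a site directory under data/.
    print(verify_site_files("data/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d"))
```

A peer that accepts an update does essentially this, plus the signature check, before it starts serving the new files to other peers.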
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [Frequently asked questions »](https://zeronet.io/docs/faq/) +#### [Frequently asked questions »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) -#### [ZeroNet Developer Documentation »](https://zeronet.io/docs/site_development/getting_started/) +#### [ZeroNet Developer Documentation »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## Screenshots @@ -59,163 +56,101 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ ![Screenshot](https://i.imgur.com/H60OAHY.png) ![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -#### [More screenshots in ZeroNet docs »](https://zeronet.io/docs/using_zeronet/sample_sites/) +#### [More screenshots in ZeroNet docs »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) ## How to join -* Download ZeroBundle package: - * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) - * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) - * [Linux x86/64-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) - * [Linux x86/32-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) -* Unpack anywhere -* Run `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) +### Windows -### Linux terminal on x86-64 + - Download [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) + - Unpack anywhere + - Run `ZeroNet.exe` + +### macOS -* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` -* `tar xvpfz ZeroBundle-linux64.tar.gz` -* `cd ZeroBundle` -* Start with `./ZeroNet.sh` + - Download [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) + - Unpack anywhere + - Run `ZeroNet.app` + +### Linux (x86-64bit) + - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` + - `unzip ZeroNet-linux.zip` + - `cd ZeroNet-linux` + - Start with: `./ZeroNet.sh` + - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/ + + __Tip:__ Start with `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` to allow remote connections on the web interface. + + ### Android (arm, arm64, x86) + - minimum Android version supported 21 (Android 5.0 Lollipop) + - [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) + - APK download: https://github.com/canewsin/zeronet_mobile/releases -It downloads the latest version of ZeroNet then starts it automatically. 
+### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) + - minimum Android version supported 16 (JellyBean) + - [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) -#### Manual install for Debian Linux -* `sudo apt-get update` -* `sudo apt-get install msgpack-python python-gevent` -* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` -* `tar xvpfz master.tar.gz` -* `cd ZeroNet-master` -* Start with `python2 zeronet.py` -* Open http://127.0.0.1:43110/ in your browser +#### Docker +There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/ -### [Whonix](https://www.whonix.org) +### Online Proxies +Proxies are like seed boxes for sites (i.e. ZNX runs on a cloud VPS), so you can try the ZeroNet experience through a proxy. Add your proxy below if you have one. -* [Instructions](https://www.whonix.org/wiki/ZeroNet) +#### Official ZNX Proxy: -### [Arch Linux](https://www.archlinux.org) +https://proxy.zeronet.dev/ -* `git clone https://aur.archlinux.org/zeronet.git` -* `cd zeronet` -* `makepkg -srci` -* `systemctl start zeronet` -* Open http://127.0.0.1:43110/ in your browser +https://zeronet.dev/ -See [ArchWiki](https://wiki.archlinux.org)'s [ZeroNet -article](https://wiki.archlinux.org/index.php/ZeroNet) for further assistance. +#### From Community -### [Gentoo Linux](https://www.gentoo.org) +https://0net-preview.com/ -* [`eselect repository enable raiagent`](https://github.com/leycec/raiagent) -* `emerge --sync` -* `echo 'net-vpn/zeronet' >> /etc/portage/package.accept_keywords` -* *(Optional)* Enable Tor support: `echo 'net-vpn/zeronet tor' >> - /etc/portage/package.use` -* `emerge zeronet` -* `rc-service zeronet start` -* *(Optional)* Enable zeronet at runlevel "default": `rc-update add zeronet` -* Open http://127.0.0.1:43110/ in your browser +https://portal.ngnoid.tv/ -See `/usr/share/doc/zeronet-*/README.gentoo.bz2` for further assistance. +https://zeronet.ipfsscan.io/ -### [FreeBSD](https://www.freebsd.org/) -* `pkg install zeronet` or `cd /usr/ports/security/zeronet/ && make install clean` -* `sysrc zeronet_enable="YES"` -* `service zeronet start` -* Open http://127.0.0.1:43110/ in your browser +### Install from source -### [Vagrant](https://www.vagrantup.com/) - -* `vagrant up` -* Access VM with `vagrant ssh` -* `cd /vagrant` -* Run `python2 zeronet.py --ui_ip 0.0.0.0` -* Open http://127.0.0.1:43110/ in your browser - -### [Docker](https://www.docker.com/) -* `docker run -d -v :/root/data -p 26552:26552 -p 127.0.0.1:43110:43110 nofish/zeronet` -* This Docker image includes the Tor proxy, which is disabled by default. Beware that some -hosting providers may not allow you running Tor in their servers. If you want to enable it, -set `ENABLE_TOR` environment variable to `true` (Default: `false`).
E.g.: - - `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 26552:26552 -p 127.0.0.1:43110:43110 nofish/zeronet` -* Open http://127.0.0.1:43110/ in your browser - -### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/) - -* `virtualenv env` -* `source env/bin/activate` -* `pip install msgpack gevent` -* `python2 zeronet.py` -* Open http://127.0.0.1:43110/ in your browser + - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip` + - `unzip ZeroNet-src.zip` + - `cd ZeroNet` + - `sudo apt-get update` + - `sudo apt-get install python3-pip` + - `sudo python3 -m pip install -r requirements.txt` + - Start with: `python3 zeronet.py` + - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/ ## Current limitations -* ~~No torrent-like file splitting for big file support~~ (big file support added) -* ~~No more anonymous than Bittorrent~~ (built-in full Tor support added) -* File transactions are not compressed ~~or encrypted yet~~ (TLS encryption added) +* File transactions are not compressed * No private sites ## How can I create a ZeroNet site? -Shut down zeronet if you are running it already - -```bash -$ zeronet.py siteCreate -... -- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq -- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -... -- Site created! -$ zeronet.py -... -``` - -Congratulations, you're finished! Now anyone can access your site using -`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` - -Next steps: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/) - - -## How can I modify a ZeroNet site? - -* Modify files located in data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory. - After you're finished: - -```bash -$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2... -Private key (input hidden): -``` - -* Enter the private key you got when you created the site, then: - -```bash -$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 -... -Site:13DNDk..bhC2 Publishing to 3/10 peers... -Site:13DNDk..bhC2 Successfuly published to 3 peers -- Serving files.... -``` - -* That's it! You've successfully signed and published your modifications. + * Click on **⋮** > **"Create new, empty site"** menu item on the site [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d). + * You will be **redirected** to a completely new site that is only modifiable by you! + * You can find and modify your site's content in the **data/[yoursiteaddress]** directory + * After the modifications, open your site, drag the top-right "0" button to the left, then press the **sign** and **publish** buttons at the bottom +Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) ## Help keep this project alive - -- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX -- Paypal: https://zeronet.io/docs/help_zeronet/donate/ - -### Sponsors - -* Better macOS/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com) +- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred) +- LiberaPay: https://liberapay.com/PramUkesh +- Paypal: https://paypal.me/PramUkesh +- Others: [Donate](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) #### Thank you!
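Once a client from any of the install options above is running, every site is reachable as a plain HTTP URL on the local UI port. The snippet below is a minimal sketch of that access pattern, assuming a ZeroNet instance is listening on the default 127.0.0.1:43110; it simply fetches whatever the local UI serves for the ZeroHello address.

```python
# Minimal sketch: talk to a locally running ZeroNet client over its web UI.
# Assumes `python3 zeronet.py` (or one of the bundles above) is already
# running with the default UI address 127.0.0.1:43110.
import urllib.request

ZERONET_UI = "http://127.0.0.1:43110"
ZEROHELLO = "1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d"


def fetch_site(address: str) -> str:
    """Return the HTML the local ZeroNet UI serves for a site address."""
    url = f"{ZERONET_UI}/{address}"
    with urllib.request.urlopen(url, timeout=30) as response:
        return response.read().decode("utf-8", errors="replace")


if __name__ == "__main__":
    page = fetch_site(ZEROHELLO)
    print(page[:200])  # show the start of the ZeroHello wrapper page
```

Anything beyond read-only access (signing and publishing changes) goes through the **sign** and **publish** buttons described above or the developer documentation linked there.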
-* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronet/ -* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet) -* Email: hello@zeronet.io (PGP: CB9613AE) +* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronetx/ +* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/canewsin/ZeroNet) +* Email: canews.in@gmail.com diff --git a/plugins b/plugins new file mode 160000 index 00000000..689d9309 --- /dev/null +++ b/plugins @@ -0,0 +1 @@ +Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb diff --git a/plugins/AnnounceLocal/AnnounceLocalPlugin.py b/plugins/AnnounceLocal/AnnounceLocalPlugin.py deleted file mode 100644 index 27b4d38a..00000000 --- a/plugins/AnnounceLocal/AnnounceLocalPlugin.py +++ /dev/null @@ -1,148 +0,0 @@ -import time - -import gevent - -from Plugin import PluginManager -from Config import config -import BroadcastServer - - -@PluginManager.registerTo("SiteAnnouncer") -class SiteAnnouncerPlugin(object): - def announce(self, force=False, *args, **kwargs): - local_announcer = self.site.connection_server.local_announcer - - thread = None - if local_announcer and (force or time.time() - local_announcer.last_discover > 5 * 60): - thread = gevent.spawn(local_announcer.discover, force=force) - back = super(SiteAnnouncerPlugin, self).announce(force=force, *args, **kwargs) - - if thread: - thread.join() - - return back - - -class LocalAnnouncer(BroadcastServer.BroadcastServer): - def __init__(self, server, listen_port): - super(LocalAnnouncer, self).__init__("zeronet", listen_port=listen_port) - self.server = server - - self.sender_info["peer_id"] = self.server.peer_id - self.sender_info["port"] = self.server.port - self.sender_info["broadcast_port"] = listen_port - self.sender_info["rev"] = config.rev - - self.known_peers = {} - self.last_discover = 0 - - def discover(self, force=False): - self.log.debug("Sending discover request (force: %s)" % force) - self.last_discover = time.time() - if force: # Probably new site added, clean cache - self.known_peers = {} - - for peer_id, known_peer in self.known_peers.items(): - if time.time() - known_peer["found"] > 20 * 60: - del(self.known_peers[peer_id]) - self.log.debug("Timeout, removing from known_peers: %s" % peer_id) - self.broadcast({"cmd": "discoverRequest", "params": {}}, port=self.listen_port) - - def actionDiscoverRequest(self, sender, params): - back = { - "cmd": "discoverResponse", - "params": { - "sites_changed": self.server.site_manager.sites_changed - } - } - - if sender["peer_id"] not in self.known_peers: - self.known_peers[sender["peer_id"]] = {"added": time.time(), "sites_changed": 0, "updated": 0, "found": time.time()} - self.log.debug("Got discover request from unknown peer %s (%s), time to refresh known peers" % (sender["ip"], sender["peer_id"])) - gevent.spawn_later(1.0, self.discover) # Let the response arrive first to the requester - - return back - - def actionDiscoverResponse(self, sender, params): - if sender["peer_id"] in self.known_peers: - self.known_peers[sender["peer_id"]]["found"] = time.time() - if params["sites_changed"] != self.known_peers.get(sender["peer_id"], {}).get("sites_changed"): - # Peer's site list changed, request the list of new sites - return {"cmd": "siteListRequest"} - else: - # Peer's site list is the same - for site in self.server.sites.values(): - peer = 
site.peers.get("%s:%s" % (sender["ip"], sender["port"])) - if peer: - peer.found("local") - - def actionSiteListRequest(self, sender, params): - back = [] - sites = self.server.sites.values() - - # Split adresses to group of 100 to avoid UDP size limit - site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)] - for site_group in site_groups: - res = {} - res["sites_changed"] = self.server.site_manager.sites_changed - res["sites"] = [site.address_hash for site in site_group] - back.append({"cmd": "siteListResponse", "params": res}) - return back - - def actionSiteListResponse(self, sender, params): - s = time.time() - peer_sites = set(params["sites"]) - num_found = 0 - added_sites = [] - for site in self.server.sites.values(): - if site.address_hash in peer_sites: - added = site.addPeer(sender["ip"], sender["port"], source="local") - num_found += 1 - if added: - site.worker_manager.onPeers() - site.updateWebsocket(peers_added=1) - added_sites.append(site) - - # Save sites changed value to avoid unnecessary site list download - if sender["peer_id"] not in self.known_peers: - self.known_peers[sender["peer_id"]] = {"added": time.time()} - - self.known_peers[sender["peer_id"]]["sites_changed"] = params["sites_changed"] - self.known_peers[sender["peer_id"]]["updated"] = time.time() - self.known_peers[sender["peer_id"]]["found"] = time.time() - - self.log.debug( - "Tracker result: Discover from %s response parsed in %.3fs, found: %s added: %s of %s" % - (sender["ip"], time.time() - s, num_found, added_sites, len(peer_sites)) - ) - - -@PluginManager.registerTo("FileServer") -class FileServerPlugin(object): - def __init__(self, *args, **kwargs): - res = super(FileServerPlugin, self).__init__(*args, **kwargs) - if config.broadcast_port and config.tor != "always" and not config.disable_udp: - self.local_announcer = LocalAnnouncer(self, config.broadcast_port) - else: - self.local_announcer = None - return res - - def start(self, *args, **kwargs): - if self.local_announcer: - gevent.spawn(self.local_announcer.start) - return super(FileServerPlugin, self).start(*args, **kwargs) - - def stop(self): - if self.local_announcer: - self.local_announcer.stop() - res = super(FileServerPlugin, self).stop() - return res - - -@PluginManager.registerTo("ConfigPlugin") -class ConfigPlugin(object): - def createArguments(self): - group = self.parser.add_argument_group("AnnounceLocal plugin") - group.add_argument('--broadcast_port', help='UDP broadcasting port for local peer discovery', default=1544, type=int, metavar='port') - - return super(ConfigPlugin, self).createArguments() diff --git a/plugins/AnnounceLocal/BroadcastServer.py b/plugins/AnnounceLocal/BroadcastServer.py deleted file mode 100644 index 5863ad05..00000000 --- a/plugins/AnnounceLocal/BroadcastServer.py +++ /dev/null @@ -1,140 +0,0 @@ -import socket -import logging -import time -from contextlib import closing - -import msgpack - -from Debug import Debug -from util import UpnpPunch - - -class BroadcastServer(object): - def __init__(self, service_name, listen_port=1544, listen_ip=''): - self.log = logging.getLogger("BroadcastServer") - self.listen_port = listen_port - self.listen_ip = listen_ip - - self.running = False - self.sock = None - self.sender_info = {"service": service_name} - - def createBroadcastSocket(self): - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if hasattr(socket, 'SO_REUSEPORT'): - try: - 
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - except Exception as err: - self.log.warning("Error setting SO_REUSEPORT: %s" % err) - - binded = False - for retry in range(3): - try: - sock.bind((self.listen_ip, self.listen_port)) - binded = True - break - except Exception as err: - self.log.error( - "Socket bind to %s:%s error: %s, retry #%s" % - (self.listen_ip, self.listen_port, Debug.formatException(err), retry) - ) - time.sleep(retry) - - if binded: - return sock - else: - return False - - def start(self): # Listens for discover requests - self.sock = self.createBroadcastSocket() - if not self.sock: - self.log.error("Unable to listen on port %s" % self.listen_port) - return - - self.log.debug("Started on port %s" % self.listen_port) - - self.running = True - - while self.running: - try: - data, addr = self.sock.recvfrom(8192) - except Exception as err: - if self.running: - self.log.error("Listener receive error: %s" % err) - continue - - if not self.running: - break - - try: - message = msgpack.unpackb(data) - response_addr, message = self.handleMessage(addr, message) - if message: - self.send(response_addr, message) - except Exception as err: - self.log.error("Handlemessage error: %s" % Debug.formatException(err)) - self.log.debug("Stopped listening on port %s" % self.listen_port) - - def stop(self): - self.log.debug("Stopping, socket: %s" % self.sock) - self.running = False - if self.sock: - self.sock.close() - - def send(self, addr, message): - if type(message) is not list: - message = [message] - - for message_part in message: - message_part["sender"] = self.sender_info - - self.log.debug("Send to %s: %s" % (addr, message_part["cmd"])) - with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock: - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.sendto(msgpack.packb(message_part), addr) - - def getMyIps(self): - return UpnpPunch._get_local_ips() - - def broadcast(self, message, port=None): - if not port: - port = self.listen_port - - my_ips = self.getMyIps() - addr = ("255.255.255.255", port) - - message["sender"] = self.sender_info - self.log.debug("Broadcast using ips %s on port %s: %s" % (my_ips, port, message["cmd"])) - - for my_ip in my_ips: - try: - with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock: - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) - sock.bind((my_ip, 0)) - sock.sendto(msgpack.packb(message), addr) - except Exception as err: - self.log.warning("Error sending broadcast using ip %s: %s" % (my_ip, err)) - - def handleMessage(self, addr, message): - self.log.debug("Got from %s: %s" % (addr, message["cmd"])) - cmd = message["cmd"] - params = message.get("params", {}) - sender = message["sender"] - sender["ip"] = addr[0] - - func_name = "action" + cmd[0].upper() + cmd[1:] - func = getattr(self, func_name, None) - - if sender["service"] != "zeronet" or sender["peer_id"] == self.sender_info["peer_id"]: - # Skip messages not for us or sent by us - message = None - elif func: - message = func(sender, params) - else: - self.log.debug("Unknown cmd: %s" % cmd) - message = None - - return (sender["ip"], sender["broadcast_port"]), message diff --git a/plugins/AnnounceLocal/Test/TestAnnounce.py b/plugins/AnnounceLocal/Test/TestAnnounce.py deleted file mode 100644 index 691ecc26..00000000 --- a/plugins/AnnounceLocal/Test/TestAnnounce.py +++ /dev/null @@ -1,113 +0,0 @@ -import time -import copy - -import gevent -import pytest -import mock - -from 
AnnounceLocal import AnnounceLocalPlugin -from File import FileServer -from Test import Spy - -@pytest.fixture -def announcer(file_server, site): - file_server.sites[site.address] = site - announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server, listen_port=1100) - file_server.local_announcer = announcer - announcer.listen_port = 1100 - announcer.sender_info["broadcast_port"] = 1100 - announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"]) - announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically - gevent.spawn(announcer.start) - time.sleep(0.5) - - assert file_server.local_announcer.running - return file_server.local_announcer - -@pytest.fixture -def announcer_remote(request, site_temp): - file_server_remote = FileServer("127.0.0.1", 1545) - file_server_remote.sites[site_temp.address] = site_temp - announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server_remote, listen_port=1101) - file_server_remote.local_announcer = announcer - announcer.listen_port = 1101 - announcer.sender_info["broadcast_port"] = 1101 - announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"]) - announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically - gevent.spawn(announcer.start) - time.sleep(0.5) - - assert file_server_remote.local_announcer.running - - def cleanup(): - file_server_remote.stop() - request.addfinalizer(cleanup) - - - return file_server_remote.local_announcer - -@pytest.mark.usefixtures("resetSettings") -@pytest.mark.usefixtures("resetTempSettings") -class TestAnnounce: - def testSenderInfo(self, announcer): - sender_info = announcer.sender_info - assert sender_info["port"] > 0 - assert len(sender_info["peer_id"]) == 20 - assert sender_info["rev"] > 0 - - def testIgnoreSelfMessages(self, announcer): - # No response to messages that has same peer_id as server - assert not announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": announcer.sender_info, "params": {}})[1] - - # Response to messages with different peer id - sender_info = copy.copy(announcer.sender_info) - sender_info["peer_id"] += "-" - addr, res = announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": sender_info, "params": {}}) - assert res["params"]["sites_changed"] > 0 - - def testDiscoverRequest(self, announcer, announcer_remote): - assert len(announcer_remote.known_peers) == 0 - with Spy.Spy(announcer_remote, "handleMessage") as responses: - announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port) - time.sleep(0.1) - - response_cmds = [response[1]["cmd"] for response in responses] - assert response_cmds == ["discoverResponse", "siteListResponse"] - assert len(responses[-1][1]["params"]["sites"]) == 1 - - # It should only request siteList if sites_changed value is different from last response - with Spy.Spy(announcer_remote, "handleMessage") as responses: - announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port) - time.sleep(0.1) - - response_cmds = [response[1]["cmd"] for response in responses] - assert response_cmds == ["discoverResponse"] - - def testPeerDiscover(self, announcer, announcer_remote, site): - assert announcer.server.peer_id != announcer_remote.server.peer_id - assert len(announcer.server.sites.values()[0].peers) == 0 - announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port) - time.sleep(0.1) - assert len(announcer.server.sites.values()[0].peers) == 1 - - def 
testRecentPeerList(self, announcer, announcer_remote, site): - assert len(site.peers_recent) == 0 - assert len(site.peers) == 0 - with Spy.Spy(announcer, "handleMessage") as responses: - announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port) - time.sleep(0.1) - assert [response[1]["cmd"] for response in responses] == ["discoverResponse", "siteListResponse"] - assert len(site.peers_recent) == 1 - assert len(site.peers) == 1 - - # It should update peer without siteListResponse - last_time_found = site.peers.values()[0].time_found - site.peers_recent.clear() - with Spy.Spy(announcer, "handleMessage") as responses: - announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port) - time.sleep(0.1) - assert [response[1]["cmd"] for response in responses] == ["discoverResponse"] - assert len(site.peers_recent) == 1 - assert site.peers.values()[0].time_found > last_time_found - - diff --git a/plugins/AnnounceLocal/Test/conftest.py b/plugins/AnnounceLocal/Test/conftest.py deleted file mode 100644 index a88c642c..00000000 --- a/plugins/AnnounceLocal/Test/conftest.py +++ /dev/null @@ -1,4 +0,0 @@ -from src.Test.conftest import * - -from Config import config -config.broadcast_port = 0 diff --git a/plugins/AnnounceLocal/Test/pytest.ini b/plugins/AnnounceLocal/Test/pytest.ini deleted file mode 100644 index d09210d1..00000000 --- a/plugins/AnnounceLocal/Test/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = Test*.py -addopts = -rsxX -v --durations=6 -markers = - webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/AnnounceLocal/__init__.py b/plugins/AnnounceLocal/__init__.py deleted file mode 100644 index defe2412..00000000 --- a/plugins/AnnounceLocal/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import AnnounceLocalPlugin \ No newline at end of file diff --git a/plugins/AnnounceShare/AnnounceSharePlugin.py b/plugins/AnnounceShare/AnnounceSharePlugin.py deleted file mode 100644 index 10e3a3e6..00000000 --- a/plugins/AnnounceShare/AnnounceSharePlugin.py +++ /dev/null @@ -1,188 +0,0 @@ -import time -import os -import logging -import json -import atexit - -import gevent - -from Config import config -from Plugin import PluginManager -from util import helper - - -class TrackerStorage(object): - def __init__(self): - self.log = logging.getLogger("TrackerStorage") - self.file_path = "%s/trackers.json" % config.data_dir - self.load() - self.time_discover = 0.0 - atexit.register(self.save) - - def getDefaultFile(self): - return {"shared": {}} - - def onTrackerFound(self, tracker_address, type="shared", my=False): - if not tracker_address.startswith("zero://"): - return False - - trackers = self.getTrackers() - added = False - if tracker_address not in trackers: - trackers[tracker_address] = { - "time_added": time.time(), - "time_success": 0, - "latency": 99.0, - "num_error": 0, - "my": False - } - self.log.debug("New tracker found: %s" % tracker_address) - added = True - - trackers[tracker_address]["time_found"] = time.time() - trackers[tracker_address]["my"] = my - return added - - def onTrackerSuccess(self, tracker_address, latency): - trackers = self.getTrackers() - if tracker_address not in trackers: - return False - - trackers[tracker_address]["latency"] = latency - trackers[tracker_address]["time_success"] = time.time() - trackers[tracker_address]["num_error"] = 0 - - def onTrackerError(self, tracker_address): - trackers = self.getTrackers() - if tracker_address not in trackers: - return 
False - - trackers[tracker_address]["time_error"] = time.time() - trackers[tracker_address]["num_error"] += 1 - - if len(self.getWorkingTrackers()) >= config.working_shared_trackers_limit: - error_limit = 5 - else: - error_limit = 30 - error_limit - - if trackers[tracker_address]["num_error"] > error_limit and trackers[tracker_address]["time_success"] < time.time() - 60 * 60: - self.log.debug("Tracker %s looks down, removing." % tracker_address) - del trackers[tracker_address] - - def getTrackers(self, type="shared"): - return self.file_content.setdefault(type, {}) - - def getWorkingTrackers(self, type="shared"): - trackers = { - key: tracker for key, tracker in self.getTrackers(type).iteritems() - if tracker["time_success"] > time.time() - 60 * 60 - } - return trackers - - def getFileContent(self): - if not os.path.isfile(self.file_path): - open(self.file_path, "w").write("{}") - return self.getDefaultFile() - try: - return json.load(open(self.file_path)) - except Exception as err: - self.log.error("Error loading trackers list: %s" % err) - return self.getDefaultFile() - - def load(self): - self.file_content = self.getFileContent() - - trackers = self.getTrackers() - self.log.debug("Loaded %s shared trackers" % len(trackers)) - for address, tracker in trackers.items(): - tracker["num_error"] = 0 - if not address.startswith("zero://"): - del trackers[address] - - def save(self): - s = time.time() - helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True)) - self.log.debug("Saved in %.3fs" % (time.time() - s)) - - def discoverTrackers(self, peers): - if len(self.getWorkingTrackers()) > config.working_shared_trackers_limit: - return False - s = time.time() - num_success = 0 - for peer in peers: - if peer.connection and peer.connection.handshake.get("rev", 0) < 3560: - continue # Not supported - - res = peer.request("getTrackers") - if not res or "error" in res: - continue - - num_success += 1 - for tracker_address in res["trackers"]: - added = self.onTrackerFound(tracker_address) - if added: # Only add one tracker from one source - break - - if not num_success and len(peers) < 20: - self.time_discover = 0.0 - - if num_success: - self.save() - - self.log.debug("Trackers discovered from %s/%s peers in %.3fs" % (num_success, len(peers), time.time() - s)) - - -if "tracker_storage" not in locals(): - tracker_storage = TrackerStorage() - - -@PluginManager.registerTo("SiteAnnouncer") -class SiteAnnouncerPlugin(object): - def getTrackers(self): - if tracker_storage.time_discover < time.time() - 5 * 60: - tracker_storage.time_discover = time.time() - gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers()) - trackers = super(SiteAnnouncerPlugin, self).getTrackers() - shared_trackers = tracker_storage.getTrackers("shared").keys() - if shared_trackers: - return trackers + shared_trackers - else: - return trackers - - def announceTracker(self, tracker, *args, **kwargs): - res = super(SiteAnnouncerPlugin, self).announceTracker(tracker, *args, **kwargs) - if res: - latency = res - tracker_storage.onTrackerSuccess(tracker, latency) - elif res is False: - tracker_storage.onTrackerError(tracker) - - return res - - -@PluginManager.registerTo("FileRequest") -class FileRequestPlugin(object): - def actionGetTrackers(self, params): - shared_trackers = tracker_storage.getWorkingTrackers("shared").keys() - self.response({"trackers": shared_trackers}) - - -@PluginManager.registerTo("FileServer") -class FileServerPlugin(object): - def portCheck(self, *args, 
**kwargs): - res = super(FileServerPlugin, self).portCheck(*args, **kwargs) - if res and not config.tor == "always" and "Bootstrapper" in PluginManager.plugin_manager.plugin_names: - for ip in self.ip_external_list: - my_tracker_address = "zero://%s:%s" % (ip, config.fileserver_port) - tracker_storage.onTrackerFound(my_tracker_address, my=True) - return res - - -@PluginManager.registerTo("ConfigPlugin") -class ConfigPlugin(object): - def createArguments(self): - group = self.parser.add_argument_group("AnnounceShare plugin") - group.add_argument('--working_shared_trackers_limit', help='Stop discovering new shared trackers after this number of shared trackers reached', default=5, type=int, metavar='limit') - - return super(ConfigPlugin, self).createArguments() diff --git a/plugins/AnnounceShare/Test/TestAnnounceShare.py b/plugins/AnnounceShare/Test/TestAnnounceShare.py deleted file mode 100644 index 4608eda7..00000000 --- a/plugins/AnnounceShare/Test/TestAnnounceShare.py +++ /dev/null @@ -1,25 +0,0 @@ -import pytest - -from AnnounceShare import AnnounceSharePlugin -from Peer import Peer -from Config import config - - -@pytest.mark.usefixtures("resetSettings") -@pytest.mark.usefixtures("resetTempSettings") -class TestAnnounceShare: - def testAnnounceList(self, file_server): - open("%s/trackers.json" % config.data_dir, "w").write("{}") - tracker_storage = AnnounceSharePlugin.tracker_storage - tracker_storage.load() - print tracker_storage.file_path, config.data_dir - peer = Peer(file_server.ip, 1544, connection_server=file_server) - assert peer.request("getTrackers")["trackers"] == [] - - tracker_storage.onTrackerFound("zero://%s:15441" % file_server.ip) - assert peer.request("getTrackers")["trackers"] == [] - - # It needs to have at least one successfull announce to be shared to other peers - tracker_storage.onTrackerSuccess("zero://%s:15441" % file_server.ip, 1.0) - assert peer.request("getTrackers")["trackers"] == ["zero://%s:15441" % file_server.ip] - diff --git a/plugins/AnnounceShare/Test/conftest.py b/plugins/AnnounceShare/Test/conftest.py deleted file mode 100644 index 5abd4dd6..00000000 --- a/plugins/AnnounceShare/Test/conftest.py +++ /dev/null @@ -1,3 +0,0 @@ -from src.Test.conftest import * - -from Config import config diff --git a/plugins/AnnounceShare/Test/pytest.ini b/plugins/AnnounceShare/Test/pytest.ini deleted file mode 100644 index d09210d1..00000000 --- a/plugins/AnnounceShare/Test/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = Test*.py -addopts = -rsxX -v --durations=6 -markers = - webtest: mark a test as a webtest. 
\ No newline at end of file diff --git a/plugins/AnnounceShare/__init__.py b/plugins/AnnounceShare/__init__.py deleted file mode 100644 index f55cb2c6..00000000 --- a/plugins/AnnounceShare/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import AnnounceSharePlugin diff --git a/plugins/AnnounceZero/AnnounceZeroPlugin.py b/plugins/AnnounceZero/AnnounceZeroPlugin.py deleted file mode 100644 index b7f9e823..00000000 --- a/plugins/AnnounceZero/AnnounceZeroPlugin.py +++ /dev/null @@ -1,138 +0,0 @@ -import time -import itertools - -from Plugin import PluginManager -from util import helper -from Crypt import CryptRsa - -allow_reload = False # No source reload supported in this plugin -time_full_announced = {} # Tracker address: Last announced all site to tracker -connection_pool = {} # Tracker address: Peer object - - -# We can only import plugin host clases after the plugins are loaded -@PluginManager.afterLoad -def importHostClasses(): - global Peer, AnnounceError - from Peer import Peer - from Site.SiteAnnouncer import AnnounceError - - -# Process result got back from tracker -def processPeerRes(tracker_address, site, peers): - added = 0 - # Ip4 - found_ipv4 = 0 - peers_normal = itertools.chain(peers.get("ip4", []), peers.get("ipv4", []), peers.get("ipv6", [])) - for packed_address in peers_normal: - found_ipv4 += 1 - peer_ip, peer_port = helper.unpackAddress(packed_address) - if site.addPeer(peer_ip, peer_port, source="tracker"): - added += 1 - # Onion - found_onion = 0 - for packed_address in peers["onion"]: - found_onion += 1 - peer_onion, peer_port = helper.unpackOnionAddress(packed_address) - if site.addPeer(peer_onion, peer_port, source="tracker"): - added += 1 - - if added: - site.worker_manager.onPeers() - site.updateWebsocket(peers_added=added) - return added - - -@PluginManager.registerTo("SiteAnnouncer") -class SiteAnnouncerPlugin(object): - def getTrackerHandler(self, protocol): - if protocol == "zero": - return self.announceTrackerZero - else: - return super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol) - - def announceTrackerZero(self, tracker_address, mode="start", num_want=10): - global time_full_announced - s = time.time() - - need_types = ["ip4"] # ip4 for backward compatibility reasons - need_types += self.site.connection_server.supported_ip_types - if self.site.connection_server.tor_manager.enabled: - need_types.append("onion") - - if mode == "start" or mode == "more": # Single: Announce only this site - sites = [self.site] - full_announce = False - else: # Multi: Announce all currently serving site - full_announce = True - if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 15: # No reannounce all sites within short time - return None - time_full_announced[tracker_address] = time.time() - from Site import SiteManager - sites = [site for site in SiteManager.site_manager.sites.values() if site.settings["serving"]] - - # Create request - add_types = self.getOpenedServiceTypes() - request = { - "hashes": [], "onions": [], "port": self.fileserver_port, "need_types": need_types, "need_num": 20, "add": add_types - } - for site in sites: - if "onion" in add_types: - onion = self.site.connection_server.tor_manager.getOnion(site.address) - request["onions"].append(onion) - request["hashes"].append(site.address_hash) - - # Tracker can remove sites that we don't announce - if full_announce: - request["delete"] = True - - # Sent request to tracker - tracker_peer = connection_pool.get(tracker_address) # Re-use tracker connection if possible - if not tracker_peer: - 
tracker_ip, tracker_port = tracker_address.rsplit(":", 1) - tracker_peer = Peer(str(tracker_ip), int(tracker_port), connection_server=self.site.connection_server) - tracker_peer.is_tracker_connection = True - connection_pool[tracker_address] = tracker_peer - - res = tracker_peer.request("announce", request) - - if not res or "peers" not in res: - if full_announce: - time_full_announced[tracker_address] = 0 - raise AnnounceError("Invalid response: %s" % res) - - # Add peers from response to site - site_index = 0 - peers_added = 0 - for site_res in res["peers"]: - site = sites[site_index] - peers_added += processPeerRes(tracker_address, site, site_res) - site_index += 1 - - # Check if we need to sign prove the onion addresses - if "onion_sign_this" in res: - self.site.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites))) - request["onion_signs"] = {} - request["onion_sign_this"] = res["onion_sign_this"] - request["need_num"] = 0 - for site in sites: - onion = self.site.connection_server.tor_manager.getOnion(site.address) - publickey = self.site.connection_server.tor_manager.getPublickey(onion) - if publickey not in request["onion_signs"]: - sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion)) - request["onion_signs"][publickey] = sign - res = tracker_peer.request("announce", request) - if not res or "onion_sign_this" in res: - if full_announce: - time_full_announced[tracker_address] = 0 - raise AnnounceError("Announce onion address to failed: %s" % res) - - if full_announce: - tracker_peer.remove() # Close connection, we don't need it in next 5 minute - - self.site.log.debug( - "Tracker announce result: zero://%s (sites: %s, new peers: %s) in %.3fs" % - (tracker_address, site_index, peers_added, time.time() - s) - ) - - return True diff --git a/plugins/AnnounceZero/__init__.py b/plugins/AnnounceZero/__init__.py deleted file mode 100644 index 4b9cbe10..00000000 --- a/plugins/AnnounceZero/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import AnnounceZeroPlugin \ No newline at end of file diff --git a/plugins/Bigfile/BigfilePiecefield.py b/plugins/Bigfile/BigfilePiecefield.py deleted file mode 100644 index c7690279..00000000 --- a/plugins/Bigfile/BigfilePiecefield.py +++ /dev/null @@ -1,158 +0,0 @@ -import array - - -def packPiecefield(data): - res = [] - if not data: - return array.array("H", "") - - if data[0] == "0": - res.append(0) - find = "1" - else: - find = "0" - last_pos = 0 - pos = 0 - while 1: - pos = data.find(find, pos) - if find == "0": - find = "1" - else: - find = "0" - if pos == -1: - res.append(len(data) - last_pos) - break - res.append(pos - last_pos) - last_pos = pos - return array.array("H", res) - - -def unpackPiecefield(data): - if not data: - return "" - - res = [] - char = "1" - for times in data: - if times > 10000: - return "" - res.append(char * times) - if char == "1": - char = "0" - else: - char = "1" - return "".join(res) - - -class BigfilePiecefield(object): - __slots__ = ["data"] - - def __init__(self): - self.data = "" - - def fromstring(self, s): - self.data = s - - def tostring(self): - return self.data - - def pack(self): - return packPiecefield(self.data).tostring() - - def unpack(self, s): - self.data = unpackPiecefield(array.array("H", s)) - - def __getitem__(self, key): - try: - return int(self.data[key]) - except IndexError: - return False - - def __setitem__(self, key, value): - data = self.data - if len(data) < key: - data = data.ljust(key+1, "0") - data 
= data[:key] + str(int(value)) + data[key + 1:] - self.data = data - - -class BigfilePiecefieldPacked(object): - __slots__ = ["data"] - - def __init__(self): - self.data = "" - - def fromstring(self, data): - self.data = packPiecefield(data).tostring() - - def tostring(self): - return unpackPiecefield(array.array("H", self.data)) - - def pack(self): - return array.array("H", self.data).tostring() - - def unpack(self, data): - self.data = data - - def __getitem__(self, key): - try: - return int(self.tostring()[key]) - except IndexError: - return False - - def __setitem__(self, key, value): - data = self.tostring() - if len(data) < key: - data = data.ljust(key+1, "0") - data = data[:key] + str(int(value)) + data[key + 1:] - self.fromstring(data) - - -if __name__ == "__main__": - import os - import psutil - import time - testdata = "1" * 100 + "0" * 900 + "1" * 4000 + "0" * 4999 + "1" - meminfo = psutil.Process(os.getpid()).memory_info - - for storage in [BigfilePiecefieldPacked, BigfilePiecefield]: - print "-- Testing storage: %s --" % storage - m = meminfo()[0] - s = time.time() - piecefields = {} - for i in range(10000): - piecefield = storage() - piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:]) - piecefields[i] = piecefield - - print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)) - - m = meminfo()[0] - s = time.time() - for piecefield in piecefields.values(): - val = piecefield[1000] - - print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s) - - m = meminfo()[0] - s = time.time() - for piecefield in piecefields.values(): - piecefield[1000] = True - - print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s) - - m = meminfo()[0] - s = time.time() - for piecefield in piecefields.values(): - packed = piecefield.pack() - - print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)) - - m = meminfo()[0] - s = time.time() - for piecefield in piecefields.values(): - piecefield.unpack(packed) - - print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)) - - piecefields = {} diff --git a/plugins/Bigfile/BigfilePlugin.py b/plugins/Bigfile/BigfilePlugin.py deleted file mode 100644 index d9b4ff1d..00000000 --- a/plugins/Bigfile/BigfilePlugin.py +++ /dev/null @@ -1,769 +0,0 @@ -import time -import os -import subprocess -import shutil -import collections -import math -import json - -import msgpack -import gevent -import gevent.lock - -from Plugin import PluginManager -from Debug import Debug -from Crypt import CryptHash -from lib import merkletools -from util import helper -import util -from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked - - -# We can only import plugin host clases after the plugins are loaded -@PluginManager.afterLoad -def importPluginnedClasses(): - global VerifyError, config - from Content.ContentManager import VerifyError - from Config import config - -if "upload_nonces" not in locals(): - upload_nonces = {} - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - def isCorsAllowed(self, path): - if path == "/ZeroNet-Internal/BigfileUpload": - return True - else: - return super(UiRequestPlugin, self).isCorsAllowed(path) - - def actionBigfileUpload(self): - nonce = self.get.get("upload_nonce") - if nonce not in upload_nonces: - return self.error403("Upload nonce error.") - - upload_info = 
upload_nonces[nonce] - del upload_nonces[nonce] - - self.sendHeader(200, "text/html", noscript=True, extra_headers={ - "Access-Control-Allow-Origin": "null", - "Access-Control-Allow-Credentials": "true" - }) - - self.readMultipartHeaders(self.env['wsgi.input']) # Skip http headers - - site = upload_info["site"] - inner_path = upload_info["inner_path"] - - with site.storage.open(inner_path, "wb", create_dirs=True) as out_file: - merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile( - self.env['wsgi.input'], upload_info["size"], upload_info["piece_size"], out_file - ) - - if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split - hash = piecemap_info["sha512_pieces"][0].encode("hex") - hash_id = site.content_manager.hashfield.getHashId(hash) - site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True) - - else: # Big file - file_name = helper.getFilename(inner_path) - msgpack.pack({file_name: piecemap_info}, site.storage.open(upload_info["piecemap"], "wb")) - - # Find piecemap and file relative path to content.json - file_info = site.content_manager.getFileInfo(inner_path, new_file=True) - content_inner_path_dir = helper.getDirname(file_info["content_inner_path"]) - piecemap_relative_path = upload_info["piecemap"][len(content_inner_path_dir):] - file_relative_path = inner_path[len(content_inner_path_dir):] - - # Add file to content.json - if site.storage.isFile(file_info["content_inner_path"]): - content = site.storage.loadJson(file_info["content_inner_path"]) - else: - content = {} - if "files_optional" not in content: - content["files_optional"] = {} - - content["files_optional"][file_relative_path] = { - "sha512": merkle_root, - "size": upload_info["size"], - "piecemap": piecemap_relative_path, - "piece_size": piece_size - } - - merkle_root_hash_id = site.content_manager.hashfield.getHashId(merkle_root) - site.content_manager.optionalDownloaded(inner_path, merkle_root_hash_id, upload_info["size"], own=True) - site.storage.writeJson(file_info["content_inner_path"], content) - - site.content_manager.contents.loadItem(file_info["content_inner_path"]) # reload cache - - return json.dumps({ - "merkle_root": merkle_root, - "piece_num": len(piecemap_info["sha512_pieces"]), - "piece_size": piece_size, - "inner_path": inner_path - }) - - def readMultipartHeaders(self, wsgi_input): - for i in range(100): - line = wsgi_input.readline() - if line == "\r\n": - break - return i - - def actionFile(self, file_path, *args, **kwargs): - if kwargs.get("file_size", 0) > 1024 * 1024 and kwargs.get("path_parts"): # Only check files larger than 1MB - path_parts = kwargs["path_parts"] - site = self.server.site_manager.get(path_parts["address"]) - big_file = site.storage.openBigfile(path_parts["inner_path"], prebuffer=2 * 1024 * 1024) - if big_file: - kwargs["file_obj"] = big_file - kwargs["file_size"] = big_file.size - - return super(UiRequestPlugin, self).actionFile(file_path, *args, **kwargs) - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def actionBigfileUploadInit(self, to, inner_path, size): - valid_signers = self.site.content_manager.getValidSigners(inner_path) - auth_address = self.user.getAuthAddress(self.site.address) - if not self.site.settings["own"] and auth_address not in valid_signers: - self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers)) - return self.response(to, {"error": "Forbidden, you can only modify your own files"}) - - nonce = 
CryptHash.random() - piece_size = 1024 * 1024 - inner_path = self.site.content_manager.sanitizePath(inner_path) - file_info = self.site.content_manager.getFileInfo(inner_path, new_file=True) - - content_inner_path_dir = helper.getDirname(file_info["content_inner_path"]) - file_relative_path = inner_path[len(content_inner_path_dir):] - - upload_nonces[nonce] = { - "added": time.time(), - "site": self.site, - "inner_path": inner_path, - "websocket_client": self, - "size": size, - "piece_size": piece_size, - "piecemap": inner_path + ".piecemap.msgpack" - } - return { - "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce, - "piece_size": piece_size, - "inner_path": inner_path, - "file_relative_path": file_relative_path - } - - def actionSiteSetAutodownloadBigfileLimit(self, to, limit): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to run this command") - - self.site.settings["autodownload_bigfile_size_limit"] = int(limit) - self.response(to, "ok") - - def actionFileDelete(self, to, inner_path): - piecemap_inner_path = inner_path + ".piecemap.msgpack" - if self.hasFilePermission(inner_path) and self.site.storage.isFile(piecemap_inner_path): - # Also delete .piecemap.msgpack file if exists - self.log.debug("Deleting piecemap: %s" % piecemap_inner_path) - file_info = self.site.content_manager.getFileInfo(piecemap_inner_path) - if file_info: - content_json = self.site.storage.loadJson(file_info["content_inner_path"]) - relative_path = file_info["relative_path"] - if relative_path in content_json.get("files_optional", {}): - del content_json["files_optional"][relative_path] - self.site.storage.writeJson(file_info["content_inner_path"], content_json) - self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) - try: - self.site.storage.delete(piecemap_inner_path) - except Exception, err: - self.log.error("File %s delete error: %s" % (piecemap_inner_path, err)) - - return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path) - - -@PluginManager.registerTo("ContentManager") -class ContentManagerPlugin(object): - def getFileInfo(self, inner_path, *args, **kwargs): - if "|" not in inner_path: - return super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs) - - inner_path, file_range = inner_path.split("|") - pos_from, pos_to = map(int, file_range.split("-")) - file_info = super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs) - return file_info - - def readFile(self, file_in, size, buff_size=1024 * 64): - part_num = 0 - recv_left = size - - while 1: - part_num += 1 - read_size = min(buff_size, recv_left) - part = file_in.read(read_size) - - if not part: - break - yield part - - if part_num % 100 == 0: # Avoid blocking ZeroNet execution during upload - time.sleep(0.001) - - recv_left -= read_size - if recv_left <= 0: - break - - def hashBigfile(self, file_in, size, piece_size=1024 * 1024, file_out=None): - self.site.settings["has_bigfile"] = True - - recv = 0 - try: - piece_hash = CryptHash.sha512t() - piece_hashes = [] - piece_recv = 0 - - mt = merkletools.MerkleTools() - mt.hash_function = CryptHash.sha512t - - part = "" - for part in self.readFile(file_in, size): - if file_out: - file_out.write(part) - - recv += len(part) - piece_recv += len(part) - piece_hash.update(part) - if piece_recv >= piece_size: - piece_digest = piece_hash.digest() - piece_hashes.append(piece_digest) - mt.leaves.append(piece_digest) - 
piece_hash = CryptHash.sha512t() - piece_recv = 0 - - if len(piece_hashes) % 100 == 0 or recv == size: - self.log.info("- [HASHING:%.0f%%] Pieces: %s, %.1fMB/%.1fMB" % ( - float(recv) / size * 100, len(piece_hashes), recv / 1024 / 1024, size / 1024 / 1024 - )) - part = "" - if len(part) > 0: - piece_digest = piece_hash.digest() - piece_hashes.append(piece_digest) - mt.leaves.append(piece_digest) - except Exception as err: - raise err - finally: - if file_out: - file_out.close() - - mt.make_tree() - return mt.get_merkle_root(), piece_size, { - "sha512_pieces": piece_hashes - } - - def hashFile(self, dir_inner_path, file_relative_path, optional=False): - inner_path = dir_inner_path + file_relative_path - - file_size = self.site.storage.getSize(inner_path) - # Only care about optional files >1MB - if not optional or file_size < 1 * 1024 * 1024: - return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional) - - back = {} - content = self.contents.get(dir_inner_path + "content.json") - - hash = None - piecemap_relative_path = None - piece_size = None - - # Don't re-hash if it's already in content.json - if content and file_relative_path in content.get("files_optional", {}): - file_node = content["files_optional"][file_relative_path] - if file_node["size"] == file_size: - self.log.info("- [SAME SIZE] %s" % file_relative_path) - hash = file_node.get("sha512") - piecemap_relative_path = file_node.get("piecemap") - piece_size = file_node.get("piece_size") - - if not hash or not piecemap_relative_path: # Not in content.json yet - if file_size < 5 * 1024 * 1024: # Don't create piecemap automatically for files smaller than 5MB - return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional) - - self.log.info("- [HASHING] %s" % file_relative_path) - merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb"), file_size) - if not hash: - hash = merkle_root - - if not piecemap_relative_path: - file_name = helper.getFilename(file_relative_path) - piecemap_relative_path = file_relative_path + ".piecemap.msgpack" - piecemap_inner_path = inner_path + ".piecemap.msgpack" - - msgpack.pack({file_name: piecemap_info}, self.site.storage.open(piecemap_inner_path, "wb")) - - back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, optional=True)) - - piece_num = int(math.ceil(float(file_size) / piece_size)) - - # Add the merkle root to hashfield - hash_id = self.site.content_manager.hashfield.getHashId(hash) - self.optionalDownloaded(inner_path, hash_id, file_size, own=True) - self.site.storage.piecefields[hash].fromstring("1" * piece_num) - - back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size} - return back - - def getPiecemap(self, inner_path): - file_info = self.site.content_manager.getFileInfo(inner_path) - piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"] - self.site.needFile(piecemap_inner_path, priority=20) - piecemap = msgpack.unpack(self.site.storage.open(piecemap_inner_path))[helper.getFilename(inner_path)] - piecemap["piece_size"] = file_info["piece_size"] - return piecemap - - def verifyPiece(self, inner_path, pos, piece): - piecemap = self.getPiecemap(inner_path) - piece_i = pos / piecemap["piece_size"] - if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]: - raise VerifyError("Invalid hash") - return True - - 
def verifyFile(self, inner_path, file, ignore_same=True): - if "|" not in inner_path: - return super(ContentManagerPlugin, self).verifyFile(inner_path, file, ignore_same) - - inner_path, file_range = inner_path.split("|") - pos_from, pos_to = map(int, file_range.split("-")) - - return self.verifyPiece(inner_path, pos_from, file) - - def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): - if "|" in inner_path: - inner_path, file_range = inner_path.split("|") - pos_from, pos_to = map(int, file_range.split("-")) - file_info = self.getFileInfo(inner_path) - - # Mark piece downloaded - piece_i = pos_from / file_info["piece_size"] - self.site.storage.piecefields[file_info["sha512"]][piece_i] = True - - # Only add to site size on first request - if hash_id in self.hashfield: - size = 0 - elif size > 1024 * 1024: - file_info = self.getFileInfo(inner_path) - if file_info and "sha512" in file_info: # We already have the file, but not in piecefield - sha512 = file_info["sha512"] - if sha512 not in self.site.storage.piecefields: - self.site.storage.checkBigfile(inner_path) - - return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own) - - def optionalRemoved(self, inner_path, hash_id, size=None): - if size and size > 1024 * 1024: - file_info = self.getFileInfo(inner_path) - sha512 = file_info["sha512"] - if sha512 in self.site.storage.piecefields: - del self.site.storage.piecefields[sha512] - - # Also remove other pieces of the file from download queue - for key in self.site.bad_files.keys(): - if key.startswith(inner_path + "|"): - del self.site.bad_files[key] - self.site.worker_manager.removeSolvedFileTasks() - return super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size) - - -@PluginManager.registerTo("SiteStorage") -class SiteStoragePlugin(object): - def __init__(self, *args, **kwargs): - super(SiteStoragePlugin, self).__init__(*args, **kwargs) - self.piecefields = collections.defaultdict(BigfilePiecefield) - if "piecefields" in self.site.settings.get("cache", {}): - for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems(): - if piecefield_packed: - self.piecefields[sha512].unpack(piecefield_packed.decode("base64")) - self.site.settings["cache"]["piecefields"] = {} - - def createSparseFile(self, inner_path, size, sha512=None): - file_path = self.getPath(inner_path) - - file_dir = os.path.dirname(file_path) - if not os.path.isdir(file_dir): - os.makedirs(file_dir) - - f = open(file_path, 'wb') - f.truncate(min(1024 * 1024 * 5, size)) # Only pre-allocate up to 5MB - f.close() - if os.name == "nt": - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - subprocess.call(["fsutil", "sparse", "setflag", file_path], close_fds=True, startupinfo=startupinfo) - - if sha512 and sha512 in self.piecefields: - self.log.debug("%s: File not exists, but has piecefield. Deleting piecefield." 
% inner_path) - del self.piecefields[sha512] - - def write(self, inner_path, content): - if "|" not in inner_path: - return super(SiteStoragePlugin, self).write(inner_path, content) - - # Write to specific position by passing |{pos} after the filename - inner_path, file_range = inner_path.split("|") - pos_from, pos_to = map(int, file_range.split("-")) - file_path = self.getPath(inner_path) - - # Create dir if not exist - file_dir = os.path.dirname(file_path) - if not os.path.isdir(file_dir): - os.makedirs(file_dir) - - if not os.path.isfile(file_path): - file_info = self.site.content_manager.getFileInfo(inner_path) - self.createSparseFile(inner_path, file_info["size"]) - - # Write file - with open(file_path, "rb+") as file: - file.seek(pos_from) - if hasattr(content, 'read'): # File-like object - shutil.copyfileobj(content, file) # Write buff to disk - else: # Simple string - file.write(content) - del content - self.onUpdated(inner_path) - - def checkBigfile(self, inner_path): - file_info = self.site.content_manager.getFileInfo(inner_path) - if not file_info or (file_info and "piecemap" not in file_info): # It's not a big file - return False - - self.site.settings["has_bigfile"] = True - file_path = self.getPath(inner_path) - sha512 = file_info["sha512"] - piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"])) - if os.path.isfile(file_path): - if sha512 not in self.piecefields: - if open(file_path).read(128) == "\0" * 128: - piece_data = "0" - else: - piece_data = "1" - self.log.debug("%s: File exists, but not in piecefield. Filling piecefiled with %s * %s." % (inner_path, piece_num, piece_data)) - self.piecefields[sha512].fromstring(piece_data * piece_num) - else: - self.log.debug("Creating bigfile: %s" % inner_path) - self.createSparseFile(inner_path, file_info["size"], sha512) - self.piecefields[sha512].fromstring("0" * piece_num) - return True - - def openBigfile(self, inner_path, prebuffer=0): - if not self.checkBigfile(inner_path): - return False - self.site.needFile(inner_path, blocking=False) # Download piecemap - return BigFile(self.site, inner_path, prebuffer=prebuffer) - - -class BigFile(object): - def __init__(self, site, inner_path, prebuffer=0): - self.site = site - self.inner_path = inner_path - file_path = site.storage.getPath(inner_path) - file_info = self.site.content_manager.getFileInfo(inner_path) - self.piece_size = file_info["piece_size"] - self.sha512 = file_info["sha512"] - self.size = file_info["size"] - self.prebuffer = prebuffer - self.read_bytes = 0 - - self.piecefield = self.site.storage.piecefields[self.sha512] - self.f = open(file_path, "rb+") - self.read_lock = gevent.lock.Semaphore() - - def read(self, buff=64 * 1024): - with self.read_lock: - pos = self.f.tell() - read_until = min(self.size, pos + buff) - requests = [] - # Request all required blocks - while 1: - piece_i = pos / self.piece_size - if piece_i * self.piece_size >= read_until: - break - pos_from = piece_i * self.piece_size - pos_to = pos_from + self.piece_size - if not self.piecefield[piece_i]: - requests.append(self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=10)) - pos += self.piece_size - - if not all(requests): - return None - - # Request prebuffer - if self.prebuffer: - prebuffer_until = min(self.size, read_until + self.prebuffer) - priority = 3 - while 1: - piece_i = pos / self.piece_size - if piece_i * self.piece_size >= prebuffer_until: - break - pos_from = piece_i * self.piece_size - pos_to = pos_from + 
self.piece_size - if not self.piecefield[piece_i]: - self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=max(0, priority)) - priority -= 1 - pos += self.piece_size - - gevent.joinall(requests) - self.read_bytes += buff - - # Increase buffer for long reads - if self.read_bytes > 7 * 1024 * 1024 and self.prebuffer < 5 * 1024 * 1024: - self.site.log.debug("%s: Increasing bigfile buffer size to 5MB..." % self.inner_path) - self.prebuffer = 5 * 1024 * 1024 - - return self.f.read(buff) - - def seek(self, pos, whence=0): - with self.read_lock: - if whence == 2: # Relative from file end - pos = self.size + pos # Use the real size instead of size on the disk - whence = 0 - return self.f.seek(pos, whence) - - def tell(self): - return self.f.tell() - - def close(self): - self.f.close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - - -@PluginManager.registerTo("WorkerManager") -class WorkerManagerPlugin(object): - def addTask(self, inner_path, *args, **kwargs): - file_info = kwargs.get("file_info") - if file_info and "piecemap" in file_info: # Bigfile - self.site.settings["has_bigfile"] = True - - piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"] - piecemap_task = None - if not self.site.storage.isFile(piecemap_inner_path): - # Start download piecemap - piecemap_task = super(WorkerManagerPlugin, self).addTask(piecemap_inner_path, priority=30) - autodownload_bigfile_size_limit = self.site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit) - if "|" not in inner_path and self.site.isDownloadable(inner_path) and file_info["size"] / 1024 / 1024 <= autodownload_bigfile_size_limit: - gevent.spawn_later(0.1, self.site.needFile, inner_path + "|all") # Download all pieces - - if "|" in inner_path: - # Start download piece - task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs) - - inner_path, file_range = inner_path.split("|") - pos_from, pos_to = map(int, file_range.split("-")) - task["piece_i"] = pos_from / file_info["piece_size"] - task["sha512"] = file_info["sha512"] - else: - if inner_path in self.site.bad_files: - del self.site.bad_files[inner_path] - if piecemap_task: - task = piecemap_task - else: - fake_evt = gevent.event.AsyncResult() # Don't download anything if no range specified - fake_evt.set(True) - task = {"evt": fake_evt} - - if not self.site.storage.isFile(inner_path): - self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"]) - piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"])) - self.site.storage.piecefields[file_info["sha512"]].fromstring("0" * piece_num) - else: - task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs) - return task - - def taskAddPeer(self, task, peer): - if "piece_i" in task: - if not peer.piecefields[task["sha512"]][task["piece_i"]]: - if task["sha512"] not in peer.piecefields: - gevent.spawn(peer.updatePiecefields, force=True) - elif not task["peers"]: - gevent.spawn(peer.updatePiecefields) - - return False # Deny to add peers to task if file not in piecefield - return super(WorkerManagerPlugin, self).taskAddPeer(task, peer) - - -@PluginManager.registerTo("FileRequest") -class FileRequestPlugin(object): - def isReadable(self, site, inner_path, file, pos): - # Peek into file - if file.read(10) == "\0" * 10: - # Looks empty, but makes sures we don't have that piece - 
file_info = site.content_manager.getFileInfo(inner_path) - if "piece_size" in file_info: - piece_i = pos / file_info["piece_size"] - if not site.storage.piecefields[file_info["sha512"]][piece_i]: - return False - # Seek back to position we want to read - file.seek(pos) - return super(FileRequestPlugin, self).isReadable(site, inner_path, file, pos) - - def actionGetPiecefields(self, params): - site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving - self.response({"error": "Unknown site"}) - return False - - # Add peer to site if not added before - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) - if not peer.connection: # Just added - peer.connect(self.connection) # Assign current connection to peer - - piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()} - self.response({"piecefields_packed": piecefields_packed}) - - def actionSetPiecefields(self, params): - site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving - self.response({"error": "Unknown site"}) - self.connection.badAction(5) - return False - - # Add or get peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection) - if not peer.connection: - peer.connect(self.connection) - - peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked) - for sha512, piecefield_packed in params["piecefields_packed"].iteritems(): - peer.piecefields[sha512].unpack(piecefield_packed) - site.settings["has_bigfile"] = True - - self.response({"ok": "Updated"}) - - -@PluginManager.registerTo("Peer") -class PeerPlugin(object): - def __getattr__(self, key): - if key == "piecefields": - self.piecefields = collections.defaultdict(BigfilePiecefieldPacked) - return self.piecefields - elif key == "time_piecefields_updated": - self.time_piecefields_updated = None - return self.time_piecefields_updated - else: - return super(PeerPlugin, self).__getattr__(key) - - @util.Noparallel(ignore_args=True) - def updatePiecefields(self, force=False): - if self.connection and self.connection.handshake.get("rev", 0) < 2190: - return False # Not supported - - # Don't update piecefield again in 1 min - if self.time_piecefields_updated and time.time() - self.time_piecefields_updated < 60 and not force: - return False - - self.time_piecefields_updated = time.time() - res = self.request("getPiecefields", {"site": self.site.address}) - if not res or "error" in res: - return False - - self.piecefields = collections.defaultdict(BigfilePiecefieldPacked) - try: - for sha512, piecefield_packed in res["piecefields_packed"].iteritems(): - self.piecefields[sha512].unpack(piecefield_packed) - except Exception as err: - self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err)) - - return self.piecefields - - def sendMyHashfield(self, *args, **kwargs): - return super(PeerPlugin, self).sendMyHashfield(*args, **kwargs) - - def updateHashfield(self, *args, **kwargs): - if self.site.settings.get("has_bigfile"): - thread = gevent.spawn(self.updatePiecefields, *args, **kwargs) - back = super(PeerPlugin, self).updateHashfield(*args, **kwargs) - thread.join() - return back - else: - return super(PeerPlugin, self).updateHashfield(*args, **kwargs) - - def getFile(self, site, inner_path, *args, **kwargs): - if "|" in inner_path: - inner_path, file_range = inner_path.split("|") - pos_from, pos_to = map(int, 
file_range.split("-")) - kwargs["pos_from"] = pos_from - kwargs["pos_to"] = pos_to - return super(PeerPlugin, self).getFile(site, inner_path, *args, **kwargs) - - -@PluginManager.registerTo("Site") -class SitePlugin(object): - def isFileDownloadAllowed(self, inner_path, file_info): - if "piecemap" in file_info: - file_size_mb = file_info["size"] / 1024 / 1024 - if config.bigfile_size_limit and file_size_mb > config.bigfile_size_limit: - self.log.debug( - "Bigfile size %s too large: %sMB > %sMB, skipping..." % - (inner_path, file_size_mb, config.bigfile_size_limit) - ) - return False - - file_info = file_info.copy() - file_info["size"] = file_info["piece_size"] - return super(SitePlugin, self).isFileDownloadAllowed(inner_path, file_info) - - def getSettingsCache(self): - back = super(SitePlugin, self).getSettingsCache() - if self.storage.piecefields: - back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()} - return back - - def needFile(self, inner_path, *args, **kwargs): - if inner_path.endswith("|all"): - @util.Pooled(20) - def pooledNeedBigfile(inner_path, *args, **kwargs): - if inner_path not in self.bad_files: - self.log.debug("Cancelled piece, skipping %s" % inner_path) - return False - return self.needFile(inner_path, *args, **kwargs) - - inner_path = inner_path.replace("|all", "") - file_info = self.needFileInfo(inner_path) - file_size = file_info["size"] - piece_size = file_info["piece_size"] - - piece_num = int(math.ceil(float(file_size) / piece_size)) - - file_threads = [] - - piecefield = self.storage.piecefields.get(file_info["sha512"]) - - for piece_i in range(piece_num): - piece_from = piece_i * piece_size - piece_to = min(file_size, piece_from + piece_size) - if not piecefield or not piecefield[piece_i]: - inner_path_piece = "%s|%s-%s" % (inner_path, piece_from, piece_to) - self.bad_files[inner_path_piece] = self.bad_files.get(inner_path_piece, 1) - res = pooledNeedBigfile(inner_path_piece, blocking=False) - if res is not True and res is not False: - file_threads.append(res) - gevent.joinall(file_threads) - else: - return super(SitePlugin, self).needFile(inner_path, *args, **kwargs) - - -@PluginManager.registerTo("ConfigPlugin") -class ConfigPlugin(object): - def createArguments(self): - group = self.parser.add_argument_group("Bigfile plugin") - group.add_argument('--autodownload_bigfile_size_limit', help='Also download bigfiles smaller than this limit if help distribute option is checked', default=1, metavar="MB", type=int) - group.add_argument('--bigfile_size_limit', help='Maximum size of downloaded big files', default=False, metavar="MB", type=int) - - return super(ConfigPlugin, self).createArguments() diff --git a/plugins/Bigfile/Test/TestBigfile.py b/plugins/Bigfile/Test/TestBigfile.py deleted file mode 100644 index de126682..00000000 --- a/plugins/Bigfile/Test/TestBigfile.py +++ /dev/null @@ -1,522 +0,0 @@ -import time -from cStringIO import StringIO - -import pytest -import msgpack -import mock - -from Connection import ConnectionServer -from Content.ContentManager import VerifyError -from File import FileServer -from File import FileRequest -from Worker import WorkerManager -from Peer import Peer -from Bigfile import BigfilePiecefield, BigfilePiecefieldPacked -from Test import Spy - - -@pytest.mark.usefixtures("resetSettings") -@pytest.mark.usefixtures("resetTempSettings") -class TestBigfile: - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" - - def createBigfile(self, site, 
inner_path="data/optional.any.iso", pieces=10): - f = site.storage.open(inner_path, "w") - for i in range(pieces * 100): - f.write(("Test%s" % i).ljust(10, "-") * 1000) - f.close() - assert site.content_manager.sign("content.json", self.privatekey) - return inner_path - - def testPiecemapCreate(self, site): - inner_path = self.createBigfile(site) - content = site.storage.loadJson("content.json") - assert "data/optional.any.iso" in content["files_optional"] - file_node = content["files_optional"][inner_path] - assert file_node["size"] == 10 * 1000 * 1000 - assert file_node["sha512"] == "47a72cde3be80b4a829e7674f72b7c6878cf6a70b0c58c6aa6c17d7e9948daf6" - assert file_node["piecemap"] == inner_path + ".piecemap.msgpack" - - piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"] - assert len(piecemap["sha512_pieces"]) == 10 - assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1] - assert piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3" - - def testVerifyPiece(self, site): - inner_path = self.createBigfile(site) - - # Verify all 10 piece - f = site.storage.open(inner_path, "rb") - for i in range(10): - piece = StringIO(f.read(1024 * 1024)) - piece.seek(0) - site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) - f.close() - - # Try to verify piece 0 with piece 1 hash - with pytest.raises(VerifyError) as err: - i = 1 - f = site.storage.open(inner_path, "rb") - piece = StringIO(f.read(1024 * 1024)) - f.close() - site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) - assert "Invalid hash" in str(err) - - def testSparseFile(self, site): - inner_path = "sparsefile" - - # Create a 100MB sparse file - site.storage.createSparseFile(inner_path, 100 * 1024 * 1024) - - # Write to file beginning - s = time.time() - f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024) - time_write_start = time.time() - s - - # Write to file end - s = time.time() - f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024) - time_write_end = time.time() - s - - # Verify writes - f = site.storage.open(inner_path) - assert f.read(10) == "hellostart" - f.seek(99 * 1024 * 1024) - assert f.read(8) == "helloend" - f.close() - - site.storage.delete(inner_path) - - # Writing to end shold not take much longer, than writing to start - assert time_write_end <= max(0.1, time_write_start * 1.1) - - def testRangedFileRequest(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - file_server.sites[site.address] = site - client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp - site_temp.connection_server = client - connection = client.getConnection(file_server.ip, 1544) - - # Add file_server as peer to client - peer_file_server = site_temp.addPeer(file_server.ip, 1544) - - buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) - - assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size - assert buff.getvalue().startswith("Test524") # Correct data - buff.seek(0) - assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash - - connection.close() - client.stop() - - def testRangedFileDownload(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - # Init source server - site.connection_server = file_server - 
file_server.sites[site.address] = site - - # Make sure the file and the piecemap in the optional hashfield - file_info = site.content_manager.getFileInfo(inner_path) - assert site.content_manager.hashfield.hasHash(file_info["sha512"]) - - piecemap_hash = site.content_manager.getFileInfo(file_info["piecemap"])["sha512"] - assert site.content_manager.hashfield.hasHash(piecemap_hash) - - # Init client server - client = ConnectionServer(file_server.ip, 1545) - site_temp.connection_server = client - peer_client = site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] - assert not bad_files - - # client_piecefield = peer_client.piecefields[file_info["sha512"]].tostring() - # assert client_piecefield == "1" * 10 - - # Download 5. and 10. block - - site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) - site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024)) - - # Verify 0. block not downloaded - f = site_temp.storage.open(inner_path) - assert f.read(10) == "\0" * 10 - # Verify 5. and 10. block downloaded - f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" - f.seek(9 * 1024 * 1024) - assert f.read(7) == "943---T" - - # Verify hashfield - assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack - - def testOpenBigfile(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = ConnectionServer(file_server.ip, 1545) - site_temp.connection_server = client - site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - # Open virtual file - assert not site_temp.storage.isFile(inner_path) - - with site_temp.storage.openBigfile(inner_path) as f: - with Spy.Spy(FileRequest, "route") as requests: - f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" - - f.seek(9 * 1024 * 1024) - assert f.read(7) == "943---T" - - assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces - - assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) - - assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001" - assert f.sha512 in site_temp.getSettingsCache()["piecefields"] - - # Test requesting already downloaded - with Spy.Spy(FileRequest, "route") as requests: - f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" - - assert len(requests) == 0 - - # Test requesting multi-block overflow reads - with Spy.Spy(FileRequest, "route") as requests: - f.seek(5 * 1024 * 1024) # We already have this block - data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. 
block - assert data.startswith("Test524") - assert data.endswith("Test838-") - assert "\0" not in data # No null bytes allowed - - assert len(requests) == 2 # Two block download - - # Test out of range request - f.seek(5 * 1024 * 1024) - data = f.read(1024 * 1024 * 30) - assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024) - - f.seek(30 * 1024 * 1024) - data = f.read(1024 * 1024 * 30) - assert len(data) == 0 - - @pytest.mark.parametrize("piecefield_obj", [BigfilePiecefield, BigfilePiecefieldPacked]) - def testPiecefield(self, piecefield_obj, site): - testdatas = [ - "1" * 100 + "0" * 900 + "1" * 4000 + "0" * 4999 + "1", - "010101" * 10 + "01" * 90 + "10" * 400 + "0" * 4999, - "1" * 10000, - "0" * 10000 - ] - for testdata in testdatas: - piecefield = piecefield_obj() - - piecefield.fromstring(testdata) - assert piecefield.tostring() == testdata - assert piecefield[0] == int(testdata[0]) - assert piecefield[100] == int(testdata[100]) - assert piecefield[1000] == int(testdata[1000]) - assert piecefield[len(testdata) - 1] == int(testdata[len(testdata) - 1]) - - packed = piecefield.pack() - piecefield_new = piecefield_obj() - piecefield_new.unpack(packed) - assert piecefield.tostring() == piecefield_new.tostring() - assert piecefield_new.tostring() == testdata - - def testFileGet(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - site_temp.connection_server = FileServer(file_server.ip, 1545) - site_temp.connection_server.sites[site_temp.address] = site_temp - site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - # Download second block - with site_temp.storage.openBigfile(inner_path) as f: - f.seek(1024 * 1024) - assert f.read(1024)[0] != "\0" - - # Make sure first block not download - with site_temp.storage.open(inner_path) as f: - assert f.read(1024)[0] == "\0" - - peer2 = site.addPeer(file_server.ip, 1545, return_peer=True) - - # Should drop error on first block request - assert not peer2.getFile(site.address, "%s|0-%s" % (inner_path, 1024 * 1024 * 1)) - - # Should not drop error for second block request - assert peer2.getFile(site.address, "%s|%s-%s" % (inner_path, 1024 * 1024 * 1, 1024 * 1024 * 2)) - - def benchmarkPeerMemory(self, site, file_server): - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - import psutil, os - meminfo = psutil.Process(os.getpid()).memory_info - - mem_s = meminfo()[0] - s = time.time() - for i in range(25000): - site.addPeer(file_server.ip, i) - print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024) # 0.082s MEM: + 6800KB - print site.peers.values()[0].piecefields - - def testUpdatePiecefield(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - server1 = file_server - server1.sites[site.address] = site - server2 = FileServer(file_server.ip, 1545) - server2.sites[site_temp.address] = site_temp - site_temp.connection_server = server2 - - # Add file_server as peer to client - server2_peer1 = site_temp.addPeer(file_server.ip, 1544) - - # Testing piecefield sync - assert len(server2_peer1.piecefields) == 0 - assert server2_peer1.updatePiecefields() # Query piecefields from peer - assert len(server2_peer1.piecefields) > 0 - - def testWorkerManagerPiecefieldDeny(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - 
- server1 = file_server - server1.sites[site.address] = site - server2 = FileServer(file_server.ip, 1545) - server2.sites[site_temp.address] = site_temp - site_temp.connection_server = server2 - - # Add file_server as peer to client - server2_peer1 = site_temp.addPeer(file_server.ip, 1544) # Working - - site_temp.downloadContent("content.json", download_files=False) - site_temp.needFile("data/optional.any.iso.piecemap.msgpack") - - # Add fake peers with optional files downloaded - for i in range(5): - fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544) - fake_peer.hashfield = site.content_manager.hashfield - fake_peer.has_hashfield = True - - with Spy.Spy(WorkerManager, "addWorker") as requests: - site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) - site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024)) - - # It should only request parts from peer1 as the other peers does not have the requested parts in piecefields - assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0 - - def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - server1 = file_server - server1.sites[site.address] = site - server2 = FileServer(file_server.ip, 1545) - server2.sites[site_temp.address] = site_temp - site_temp.connection_server = server2 - sha512 = site.content_manager.getFileInfo(inner_path)["sha512"] - - # Create 10 fake peer for each piece - for i in range(10): - peer = Peer(file_server.ip, 1544, site_temp, server2) - peer.piecefields[sha512][i] = "1" - peer.updateHashfield = mock.MagicMock(return_value=False) - peer.updatePiecefields = mock.MagicMock(return_value=False) - peer.findHashIds = mock.MagicMock(return_value={"nope": []}) - peer.hashfield = site.content_manager.hashfield - peer.has_hashfield = True - peer.key = "Peer:%s" % i - site_temp.peers["Peer:%s" % i] = peer - - site_temp.downloadContent("content.json", download_files=False) - site_temp.needFile("data/optional.any.iso.piecemap.msgpack") - - with Spy.Spy(Peer, "getFile") as requests: - for i in range(10): - site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024)) - - assert len(requests) == 10 - for i in range(10): - assert requests[i][0] == site_temp.peers["Peer:%s" % i] # Every part should be requested from piece owner peer - - def testDownloadStats(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = ConnectionServer(file_server.ip, 1545) - site_temp.connection_server = client - site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - # Open virtual file - assert not site_temp.storage.isFile(inner_path) - - # Check size before downloads - assert site_temp.settings["size"] < 10 * 1024 * 1024 - assert site_temp.settings["optional_downloaded"] == 0 - size_piecemap = site_temp.content_manager.getFileInfo(inner_path + ".piecemap.msgpack")["size"] - size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"] - - with site_temp.storage.openBigfile(inner_path) as f: - assert "\0" not in f.read(1024) - assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile - - with site_temp.storage.openBigfile(inner_path) as f: - # Don't count twice - assert "\0" not in f.read(1024) - assert 
site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile - - # Add second block - assert "\0" not in f.read(1024 * 1024) - assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile - - def testPrebuffer(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = ConnectionServer(file_server.ip, 1545) - site_temp.connection_server = client - site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - # Open virtual file - assert not site_temp.storage.isFile(inner_path) - - with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f: - with Spy.Spy(FileRequest, "route") as requests: - f.seek(5 * 1024 * 1024) - assert f.read(7) == "Test524" - # assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces - assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2 - - time.sleep(0.5) # Wait prebuffer download - - sha512 = site.content_manager.getFileInfo(inner_path)["sha512"] - assert site_temp.storage.piecefields[sha512].tostring() == "0000011100" - - # No prebuffer beyond end of the file - f.seek(9 * 1024 * 1024) - assert "\0" not in f.read(7) - - assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0 - - def testDownloadAllPieces(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = ConnectionServer(file_server.ip, 1545) - site_temp.connection_server = client - site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - # Open virtual file - assert not site_temp.storage.isFile(inner_path) - - with Spy.Spy(FileRequest, "route") as requests: - site_temp.needFile("%s|all" % inner_path) - - assert len(requests) == 12 # piecemap.msgpack, getPiecefields, 10 x piece - - # Don't re-download already got pieces - with Spy.Spy(FileRequest, "route") as requests: - site_temp.needFile("%s|all" % inner_path) - - assert len(requests) == 0 - - def testFileSize(self, file_server, site, site_temp): - inner_path = self.createBigfile(site) - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - client = ConnectionServer(file_server.ip, 1545) - site_temp.connection_server = client - site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - # Open virtual file - assert not site_temp.storage.isFile(inner_path) - - # Download first block - site_temp.needFile("%s|%s-%s" % (inner_path, 0 * 1024 * 1024, 1 * 1024 * 1024)) - assert site_temp.storage.getSize(inner_path) < 1000 * 1000 * 10 # Size on the disk should be smaller than the real size - - site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024)) - assert site_temp.storage.getSize(inner_path) == site.storage.getSize(inner_path) - - @pytest.mark.parametrize("size", [1024 * 3, 1024 * 1024 * 3, 1024 * 1024 * 30]) - def testNullFileRead(self, file_server, site, site_temp, size): - inner_path = "data/optional.iso" - - f = site.storage.open(inner_path, "w") - f.write("\0" * size) - f.close() 
- assert site.content_manager.sign("content.json", self.privatekey) - - # Init source server - site.connection_server = file_server - file_server.sites[site.address] = site - - # Init client server - site_temp.connection_server = FileServer(file_server.ip, 1545) - site_temp.connection_server.sites[site_temp.address] = site_temp - site_temp.addPeer(file_server.ip, 1544) - - # Download site - site_temp.download(blind_includes=True).join(timeout=5) - - if "piecemap" in site.content_manager.getFileInfo(inner_path): # Bigfile - site_temp.needFile(inner_path + "|all") - else: - site_temp.needFile(inner_path) - - - assert site_temp.storage.getSize(inner_path) == size diff --git a/plugins/Bigfile/Test/conftest.py b/plugins/Bigfile/Test/conftest.py deleted file mode 100644 index 634e66e2..00000000 --- a/plugins/Bigfile/Test/conftest.py +++ /dev/null @@ -1 +0,0 @@ -from src.Test.conftest import * diff --git a/plugins/Bigfile/Test/pytest.ini b/plugins/Bigfile/Test/pytest.ini deleted file mode 100644 index d09210d1..00000000 --- a/plugins/Bigfile/Test/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = Test*.py -addopts = -rsxX -v --durations=6 -markers = - webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/Bigfile/__init__.py b/plugins/Bigfile/__init__.py deleted file mode 100644 index 005d6661..00000000 --- a/plugins/Bigfile/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -import BigfilePlugin -from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked \ No newline at end of file diff --git a/plugins/Chart/ChartCollector.py b/plugins/Chart/ChartCollector.py deleted file mode 100644 index 471c4b91..00000000 --- a/plugins/Chart/ChartCollector.py +++ /dev/null @@ -1,182 +0,0 @@ -import time -import sys -import collections -import itertools -import logging - -import gevent -from util import helper -from Config import config - - -class ChartCollector(object): - def __init__(self, db): - self.db = db - if config.action == "main": - gevent.spawn_later(60 * 3, self.collector) - self.log = logging.getLogger("ChartCollector") - self.last_values = collections.defaultdict(dict) - - def setInitialLastValues(self, sites): - # Recover last value of site bytes/sent - for site in sites: - self.last_values["site:" + site.address]["site_bytes_recv"] = site.settings.get("bytes_recv", 0) - self.last_values["site:" + site.address]["site_bytes_sent"] = site.settings.get("bytes_sent", 0) - - def getCollectors(self): - collectors = {} - file_server = sys.modules["main"].file_server - sites = file_server.sites - if not sites: - return collectors - content_db = sites.values()[0].content_manager.contents.db - - # Connection stats - collectors["connection"] = lambda: len(file_server.connections) - collectors["connection_in"] = ( - lambda: len([1 for connection in file_server.connections if connection.type == "in"]) - ) - collectors["connection_onion"] = ( - lambda: len([1 for connection in file_server.connections if connection.ip.endswith(".onion")]) - ) - collectors["connection_ping_avg"] = ( - lambda: round(1000 * helper.avg( - [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay] - )) - ) - collectors["connection_ping_min"] = ( - lambda: round(1000 * min( - [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay] - )) - ) - collectors["connection_rev_avg"] = ( - lambda: helper.avg( - [connection.handshake["rev"] for connection in file_server.connections if connection.handshake] - 
) - ) - - # Request stats - collectors["file_bytes_recv|change"] = lambda: file_server.bytes_recv - collectors["file_bytes_sent|change"] = lambda: file_server.bytes_sent - collectors["request_num_recv|change"] = lambda: file_server.num_recv - collectors["request_num_sent|change"] = lambda: file_server.num_sent - - # Limit - collectors["optional_limit"] = lambda: content_db.getOptionalLimitBytes() - collectors["optional_used"] = lambda: content_db.getOptionalUsedBytes() - collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()]) - - # Peers - collectors["peer"] = lambda (peers): len(peers) - collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer]) - - # Size - collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()]) - collectors["size_optional"] = lambda: sum([site.settings.get("size_optional", 0) for site in sites.values()]) - collectors["content"] = lambda: sum([len(site.content_manager.contents) for site in sites.values()]) - - return collectors - - def getSiteCollectors(self): - site_collectors = {} - - # Size - site_collectors["site_size"] = lambda(site): site.settings.get("size", 0) - site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0) - site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0) - site_collectors["site_content"] = lambda(site): len(site.content_manager.contents) - - # Data transfer - site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0) - site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0) - - # Peers - site_collectors["site_peer"] = lambda(site): len(site.peers) - site_collectors["site_peer_onion"] = lambda(site): len( - [True for peer in site.peers.itervalues() if peer.ip.endswith(".onion")] - ) - site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection]) - - return site_collectors - - def getUniquePeers(self): - sites = sys.modules["main"].file_server.sites - return set(itertools.chain.from_iterable( - [site.peers.keys() for site in sites.values()] - )) - - def collectDatas(self, collectors, last_values, site=None): - if site is None: - peers = self.getUniquePeers() - datas = {} - for key, collector in collectors.iteritems(): - try: - if site: - value = collector(site) - elif key.startswith("peer"): - value = collector(peers) - else: - value = collector() - except Exception as err: - self.log.info("Collector %s error: %s" % (key, err)) - value = None - - if "|change" in key: # Store changes relative to last value - key = key.replace("|change", "") - last_value = last_values.get(key, 0) - last_values[key] = value - value = value - last_value - - if value is None: - datas[key] = None - else: - datas[key] = round(value, 3) - return datas - - def collectGlobal(self, collectors, last_values): - now = int(time.time()) - s = time.time() - datas = self.collectDatas(collectors, last_values["global"]) - values = [] - for key, value in datas.iteritems(): - values.append((self.db.getTypeId(key), value, now)) - self.log.debug("Global collectors done in %.3fs" % (time.time() - s)) - - s = time.time() - cur = self.db.getCursor() - cur.execute("BEGIN") - cur.cursor.executemany("INSERT INTO data (type_id, value, date_added) VALUES (?, ?, ?)", values) - cur.execute("END") - cur.close() - self.log.debug("Global collectors 
inserted in %.3fs" % (time.time() - s)) - - def collectSites(self, sites, collectors, last_values): - now = int(time.time()) - s = time.time() - values = [] - for address, site in sites.iteritems(): - site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site) - for key, value in site_datas.iteritems(): - values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now)) - time.sleep(0.000001) - self.log.debug("Site collections done in %.3fs" % (time.time() - s)) - - s = time.time() - cur = self.db.getCursor() - cur.execute("BEGIN") - cur.cursor.executemany("INSERT INTO data (type_id, site_id, value, date_added) VALUES (?, ?, ?, ?)", values) - cur.execute("END") - cur.close() - self.log.debug("Site collectors inserted in %.3fs" % (time.time() - s)) - - def collector(self): - collectors = self.getCollectors() - site_collectors = self.getSiteCollectors() - sites = sys.modules["main"].file_server.sites - i = 0 - while 1: - self.collectGlobal(collectors, self.last_values) - if i % 12 == 0: # Only collect sites data every hour - self.collectSites(sites, site_collectors, self.last_values) - time.sleep(60 * 5) - i += 1 diff --git a/plugins/Chart/ChartDb.py b/plugins/Chart/ChartDb.py deleted file mode 100644 index 3747dca3..00000000 --- a/plugins/Chart/ChartDb.py +++ /dev/null @@ -1,133 +0,0 @@ -from Config import config -from Db import Db -import time - - -class ChartDb(Db): - def __init__(self): - self.version = 2 - super(ChartDb, self).__init__(self.getSchema(), "%s/chart.db" % config.data_dir) - self.foreign_keys = True - self.checkTables() - self.sites = self.loadSites() - self.types = self.loadTypes() - - def getSchema(self): - schema = {} - schema["db_name"] = "Chart" - schema["tables"] = {} - schema["tables"]["data"] = { - "cols": [ - ["data_id", "INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE"], - ["type_id", "INTEGER NOT NULL"], - ["site_id", "INTEGER"], - ["value", "INTEGER"], - ["date_added", "DATETIME DEFAULT (CURRENT_TIMESTAMP)"] - ], - "indexes": [ - "CREATE INDEX site_id ON data (site_id)", - "CREATE INDEX date_added ON data (date_added)" - ], - "schema_changed": 2 - } - schema["tables"]["type"] = { - "cols": [ - ["type_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"], - ["name", "TEXT"] - ], - "schema_changed": 1 - } - schema["tables"]["site"] = { - "cols": [ - ["site_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"], - ["address", "TEXT"] - ], - "schema_changed": 1 - } - return schema - - def getTypeId(self, name): - if name not in self.types: - self.execute("INSERT INTO type ?", {"name": name}) - self.types[name] = self.cur.cursor.lastrowid - - return self.types[name] - - def getSiteId(self, address): - if address not in self.sites: - self.execute("INSERT INTO site ?", {"address": address}) - self.sites[address] = self.cur.cursor.lastrowid - - return self.sites[address] - - def loadSites(self): - sites = {} - for row in self.execute("SELECT * FROM site"): - sites[row["address"]] = row["site_id"] - return sites - - def loadTypes(self): - types = {} - for row in self.execute("SELECT * FROM type"): - types[row["name"]] = row["type_id"] - return types - - def deleteSite(self, address): - if address in self.sites: - site_id = self.sites[address] - del self.sites[address] - self.execute("DELETE FROM site WHERE ?", {"site_id": site_id}) - self.execute("DELETE FROM data WHERE ?", {"site_id": site_id}) - - def archive(self): - week_back = 1 - while 1: - s = time.time() - date_added_from = time.time() - 60 * 60 * 24 * 7 * (week_back + 1) - date_added_to = 
date_added_from + 60 * 60 * 24 * 7 - res = self.execute(""" - SELECT - MAX(date_added) AS date_added, - SUM(value) AS value, - GROUP_CONCAT(data_id) AS data_ids, - type_id, - site_id, - COUNT(*) AS num - FROM data - WHERE - site_id IS NULL AND - date_added > :date_added_from AND - date_added < :date_added_to - GROUP BY strftime('%Y-%m-%d %H', date_added, 'unixepoch', 'localtime'), type_id - """, {"date_added_from": date_added_from, "date_added_to": date_added_to}) - - num_archived = 0 - cur = self.getCursor() - for row in res: - if row["num"] == 1: - continue - cur.execute("INSERT INTO data ?", { - "type_id": row["type_id"], - "site_id": row["site_id"], - "value": row["value"], - "date_added": row["date_added"] - }) - cur.execute("DELETE FROM data WHERE data_id IN (%s)" % row["data_ids"]) - num_archived += row["num"] - self.log.debug("Archived %s data from %s weeks ago in %.3fs" % (num_archived, week_back, time.time() - s)) - week_back += 1 - time.sleep(0.1) - if num_archived == 0: - break - # Only keep 6 month of global stats - self.execute( - "DELETE FROM data WHERE site_id IS NULL AND date_added < :date_added_limit", - {"date_added_limit": time.time() - 60 * 60 * 24 * 30 * 6 } - ) - # Only keep 1 month of site stats - self.execute( - "DELETE FROM data WHERE site_id IS NOT NULL AND date_added < :date_added_limit", - {"date_added_limit": time.time() - 60 * 60 * 24 * 30 } - ) - if week_back > 1: - self.execute("VACUUM") diff --git a/plugins/Chart/ChartPlugin.py b/plugins/Chart/ChartPlugin.py deleted file mode 100644 index a491618b..00000000 --- a/plugins/Chart/ChartPlugin.py +++ /dev/null @@ -1,60 +0,0 @@ -import time -import itertools - -import gevent - -from Config import config -from util import helper -from Plugin import PluginManager -from ChartDb import ChartDb -from ChartCollector import ChartCollector - -if "db" not in locals().keys(): # Share on reloads - db = ChartDb() - gevent.spawn_later(10 * 60, db.archive) - helper.timer(60 * 60 * 6, db.archive) - collector = ChartCollector(db) - -@PluginManager.registerTo("SiteManager") -class SiteManagerPlugin(object): - def load(self, *args, **kwargs): - back = super(SiteManagerPlugin, self).load(*args, **kwargs) - collector.setInitialLastValues(self.sites.values()) - return back - - def delete(self, address, *args, **kwargs): - db.deleteSite(address) - return super(SiteManagerPlugin, self).delete(address, *args, **kwargs) - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def actionChartDbQuery(self, to, query, params=None): - if not "ADMIN" in self.permissions: - return {"error": "No permission"} - - if config.debug or config.verbose: - s = time.time() - rows = [] - try: - if not query.strip().upper().startswith("SELECT"): - raise Exception("Only SELECT query supported") - res = db.execute(query, params) - except Exception, err: # Response the error to client - self.log.error("ChartDbQuery error: %s" % err) - return {"error": str(err)} - # Convert result to dict - for row in res: - rows.append(dict(row)) - if config.verbose and time.time() - s > 0.1: # Log slow query - self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s)) - return rows - - def actionChartGetPeerLocations(self, to): - if not "ADMIN" in self.permissions: - return {"error": "No permission"} - - peers = {} - for site in self.server.sites.values(): - peers.update(site.peers) - peer_locations = self.getPeerLocations(peers) - return peer_locations diff --git a/plugins/Chart/__init__.py b/plugins/Chart/__init__.py deleted file mode 
100644 index 78981122..00000000 --- a/plugins/Chart/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import ChartPlugin \ No newline at end of file diff --git a/plugins/ContentFilter/ContentFilterPlugin.py b/plugins/ContentFilter/ContentFilterPlugin.py deleted file mode 100644 index 05f33376..00000000 --- a/plugins/ContentFilter/ContentFilterPlugin.py +++ /dev/null @@ -1,223 +0,0 @@ -import time -import re -import cgi -import hashlib - -from Plugin import PluginManager -from Translate import Translate -from Config import config - -from ContentFilterStorage import ContentFilterStorage - - -if "_" not in locals(): - _ = Translate("plugins/ContentFilter/languages/") - - -@PluginManager.registerTo("SiteManager") -class SiteManagerPlugin(object): - def load(self, *args, **kwargs): - global filter_storage - super(SiteManagerPlugin, self).load(*args, **kwargs) - filter_storage = ContentFilterStorage(site_manager=self) - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - # Mute - def cbMuteAdd(self, to, auth_address, cert_user_id, reason): - filter_storage.file_content["mutes"][auth_address] = { - "cert_user_id": cert_user_id, "reason": reason, "source": self.site.address, "date_added": time.time() - } - filter_storage.save() - filter_storage.changeDbs(auth_address, "remove") - self.response(to, "ok") - - def actionMuteAdd(self, to, auth_address, cert_user_id, reason): - if "ADMIN" in self.getPermissions(to): - self.cbMuteAdd(to, auth_address, cert_user_id, reason) - else: - self.cmd( - "confirm", - [_["Hide all content from %s?"] % cgi.escape(cert_user_id), _["Mute"]], - lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason) - ) - - def cbMuteRemove(self, to, auth_address): - del filter_storage.file_content["mutes"][auth_address] - filter_storage.save() - filter_storage.changeDbs(auth_address, "load") - self.response(to, "ok") - - def actionMuteRemove(self, to, auth_address): - if "ADMIN" in self.getPermissions(to): - self.cbMuteRemove(to, auth_address) - else: - self.cmd( - "confirm", - [_["Unmute %s?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]], - lambda (res): self.cbMuteRemove(to, auth_address) - ) - - def actionMuteList(self, to): - if "ADMIN" in self.getPermissions(to): - self.response(to, filter_storage.file_content["mutes"]) - else: - return self.response(to, {"error": "Forbidden: Only ADMIN sites can list mutes"}) - - # Siteblock - def actionSiteblockAdd(self, to, site_address, reason=None): - if "ADMIN" not in self.getPermissions(to): - return self.response(to, {"error": "Forbidden: Only ADMIN sites can add to blocklist"}) - filter_storage.file_content["siteblocks"][site_address] = {"date_added": time.time(), "reason": reason} - filter_storage.save() - self.response(to, "ok") - - def actionSiteblockRemove(self, to, site_address): - if "ADMIN" not in self.getPermissions(to): - return self.response(to, {"error": "Forbidden: Only ADMIN sites can remove from blocklist"}) - del filter_storage.file_content["siteblocks"][site_address] - filter_storage.save() - self.response(to, "ok") - - def actionSiteblockList(self, to): - if "ADMIN" in self.getPermissions(to): - self.response(to, filter_storage.file_content["siteblocks"]) - else: - return self.response(to, {"error": "Forbidden: Only ADMIN sites can list blocklists"}) - - # Include - def actionFilterIncludeAdd(self, to, inner_path, description=None, address=None): - if address: - if "ADMIN" not in self.getPermissions(to): - return self.response(to, 
{"error": "Forbidden: Only ADMIN sites can manage different site include"}) - site = self.server.sites[address] - else: - address = self.site.address - site = self.site - - if "ADMIN" in self.getPermissions(to): - self.cbFilterIncludeAdd(to, True, address, inner_path, description) - else: - content = site.storage.loadJson(inner_path) - title = _["New shared global content filter: %s (%s sites, %s users)"] % ( - cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {})) - ) - - self.cmd( - "confirm", - [title, "Add"], - lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description) - ) - - def cbFilterIncludeAdd(self, to, res, address, inner_path, description): - if not res: - self.response(to, res) - return False - - filter_storage.includeAdd(address, inner_path, description) - self.response(to, "ok") - - def actionFilterIncludeRemove(self, to, inner_path, address=None): - if address: - if "ADMIN" not in self.getPermissions(to): - return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"}) - else: - address = self.site.address - - key = "%s/%s" % (address, inner_path) - if key not in filter_storage.file_content["includes"]: - self.response(to, {"error": "Include not found"}) - filter_storage.includeRemove(address, inner_path) - self.response(to, "ok") - - def actionFilterIncludeList(self, to, all_sites=False, filters=False): - if all_sites and "ADMIN" not in self.getPermissions(to): - return self.response(to, {"error": "Forbidden: Only ADMIN sites can list all sites includes"}) - - back = [] - includes = filter_storage.file_content.get("includes", {}).values() - for include in includes: - if not all_sites and include["address"] != self.site.address: - continue - if filters: - include = dict(include) # Don't modify original file_content - include_site = filter_storage.site_manager.get(include["address"]) - if not include_site: - continue - try: - content = include_site.storage.loadJson(include["inner_path"]) - include["error"] = None - except Exception as err: - if include_site.settings["own"]: - include_site.log.warning("Error loading filter %s: %s" % (include["inner_path"], err)) - content = {} - include["error"] = str(err) - include["mutes"] = content.get("mutes", {}) - include["siteblocks"] = content.get("siteblocks", {}) - back.append(include) - self.response(to, back) - - -@PluginManager.registerTo("SiteStorage") -class SiteStoragePlugin(object): - def updateDbFile(self, inner_path, file=None, cur=None): - if file is not False: # File deletion always allowed - # Find for bitcoin addresses in file path - matches = re.findall("/(1[A-Za-z0-9]{26,35})/", inner_path) - # Check if any of the adresses are in the mute list - for auth_address in matches: - if filter_storage.isMuted(auth_address): - self.log.debug("Mute match: %s, ignoring %s" % (auth_address, inner_path)) - return False - - return super(SiteStoragePlugin, self).updateDbFile(inner_path, file=file, cur=cur) - - def onUpdated(self, inner_path, file=None): - file_path = "%s/%s" % (self.site.address, inner_path) - if file_path in filter_storage.file_content["includes"]: - self.log.debug("Filter file updated: %s" % inner_path) - filter_storage.includeUpdateAll() - return super(SiteStoragePlugin, self).onUpdated(inner_path, file=file) - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - def actionWrapper(self, path, extra_headers=None): - match = re.match("/(?P
[A-Za-z0-9\._-]+)(?P/.*|$)", path) - if not match: - return False - address = match.group("address") - - if self.server.site_manager.get(address): # Site already exists - return super(UiRequestPlugin, self).actionWrapper(path, extra_headers) - - if self.server.site_manager.isDomain(address): - address = self.server.site_manager.resolveDomain(address) - - if address: - address_sha256 = "0x" + hashlib.sha256(address).hexdigest() - else: - address_sha256 = None - - if filter_storage.isSiteblocked(address) or filter_storage.isSiteblocked(address_sha256): - site = self.server.site_manager.get(config.homepage) - if not extra_headers: - extra_headers = {} - - script_nonce = self.getScriptNonce() - - self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) - return iter([super(UiRequestPlugin, self).renderWrapper( - site, path, "uimedia/plugins/contentfilter/blocklisted.html?address=" + address, - "Blacklisted site", extra_headers, show_loadingscreen=False, script_nonce=script_nonce - )]) - else: - return super(UiRequestPlugin, self).actionWrapper(path, extra_headers) - - def actionUiMedia(self, path, *args, **kwargs): - if path.startswith("/uimedia/plugins/contentfilter/"): - file_path = path.replace("/uimedia/plugins/contentfilter/", "plugins/ContentFilter/media/") - return self.actionFile(file_path) - else: - return super(UiRequestPlugin, self).actionUiMedia(path) diff --git a/plugins/ContentFilter/ContentFilterStorage.py b/plugins/ContentFilter/ContentFilterStorage.py deleted file mode 100644 index 17af298f..00000000 --- a/plugins/ContentFilter/ContentFilterStorage.py +++ /dev/null @@ -1,140 +0,0 @@ -import os -import json -import logging -import collections -import time - -from Debug import Debug -from Plugin import PluginManager -from Config import config -from util import helper - -class ContentFilterStorage(object): - def __init__(self, site_manager): - self.log = logging.getLogger("ContentFilterStorage") - self.file_path = "%s/filters.json" % config.data_dir - self.site_manager = site_manager - self.file_content = self.load() - - # Set default values for filters.json - if not self.file_content: - self.file_content = {} - - # Site blacklist renamed to site blocks - if "site_blacklist" in self.file_content: - self.file_content["siteblocks"] = self.file_content["site_blacklist"] - del self.file_content["site_blacklist"] - - for key in ["mutes", "siteblocks", "includes"]: - if key not in self.file_content: - self.file_content[key] = {} - - self.include_filters = collections.defaultdict(set) # Merged list of mutes and blacklists from all include - self.includeUpdateAll(update_site_dbs=False) - - def load(self): - # Rename previously used mutes.json -> filters.json - if os.path.isfile("%s/mutes.json" % config.data_dir): - self.log.info("Renaming mutes.json to filters.json...") - os.rename("%s/mutes.json" % config.data_dir, self.file_path) - if os.path.isfile(self.file_path): - try: - return json.load(open(self.file_path)) - except Exception as err: - self.log.error("Error loading filters.json: %s" % err) - return None - else: - return None - - def includeUpdateAll(self, update_site_dbs=True): - s = time.time() - new_include_filters = collections.defaultdict(set) - - # Load all include files data into a merged set - for include_path in self.file_content["includes"]: - address, inner_path = include_path.split("/", 1) - try: - content = self.site_manager.get(address).storage.loadJson(inner_path) - except Exception as err: - self.log.warning( - "Error loading include %s: %s" % - 
(include_path, Debug.formatException(err)) - ) - continue - - for key, val in content.iteritems(): - if type(val) is not dict: - continue - - new_include_filters[key].update(val.keys()) - - mutes_added = new_include_filters["mutes"].difference(self.include_filters["mutes"]) - mutes_removed = self.include_filters["mutes"].difference(new_include_filters["mutes"]) - - self.include_filters = new_include_filters - - if update_site_dbs: - for auth_address in mutes_added: - self.changeDbs(auth_address, "remove") - - for auth_address in mutes_removed: - if not self.isMuted(auth_address): - self.changeDbs(auth_address, "load") - - num_mutes = len(self.include_filters["mutes"]) - num_siteblocks = len(self.include_filters["siteblocks"]) - self.log.debug( - "Loaded %s mutes, %s blocked sites from %s includes in %.3fs" % - (num_mutes, num_siteblocks, len(self.file_content["includes"]), time.time() - s) - ) - - def includeAdd(self, address, inner_path, description=None): - self.file_content["includes"]["%s/%s" % (address, inner_path)] = { - "date_added": time.time(), - "address": address, - "description": description, - "inner_path": inner_path - } - self.includeUpdateAll() - self.save() - - def includeRemove(self, address, inner_path): - del self.file_content["includes"]["%s/%s" % (address, inner_path)] - self.includeUpdateAll() - self.save() - - def save(self): - s = time.time() - helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True)) - self.log.debug("Saved in %.3fs" % (time.time() - s)) - - def isMuted(self, auth_address): - if auth_address in self.file_content["mutes"] or auth_address in self.include_filters["mutes"]: - return True - else: - return False - - def isSiteblocked(self, address): - if address in self.file_content["siteblocks"] or address in self.include_filters["siteblocks"]: - return True - else: - return False - - # Search and remove or readd files of an user - def changeDbs(self, auth_address, action): - self.log.debug("Mute action %s on user %s" % (action, auth_address)) - res = self.site_manager.list().values()[0].content_manager.contents.db.execute( - "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path", - {"inner_path": "%%/%s/%%" % auth_address} - ) - for row in res: - site = self.site_manager.sites.get(row["address"]) - if not site: - continue - dir_inner_path = helper.getDirname(row["inner_path"]) - for file_name in site.storage.walk(dir_inner_path): - if action == "remove": - site.storage.onUpdated(dir_inner_path + file_name, False) - else: - site.storage.onUpdated(dir_inner_path + file_name) - site.onFileDone(dir_inner_path + file_name) diff --git a/plugins/ContentFilter/Test/TestContentFilter.py b/plugins/ContentFilter/Test/TestContentFilter.py deleted file mode 100644 index e1b37b16..00000000 --- a/plugins/ContentFilter/Test/TestContentFilter.py +++ /dev/null @@ -1,82 +0,0 @@ -import pytest -from ContentFilter import ContentFilterPlugin -from Site import SiteManager - - -@pytest.fixture -def filter_storage(): - ContentFilterPlugin.filter_storage = ContentFilterPlugin.ContentFilterStorage(SiteManager.site_manager) - return ContentFilterPlugin.filter_storage - - -@pytest.mark.usefixtures("resetSettings") -@pytest.mark.usefixtures("resetTempSettings") -class TestContentFilter: - def createInclude(self, site): - site.storage.writeJson("filters.json", { - "mutes": {"1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C": {}}, - "siteblocks": {site.address: {}} - }) - - def testIncludeLoad(self, site, filter_storage): - 
self.createInclude(site) - filter_storage.file_content["includes"]["%s/%s" % (site.address, "filters.json")] = { - "date_added": 1528295893, - } - - assert not filter_storage.include_filters["mutes"] - assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") - assert not filter_storage.isSiteblocked(site.address) - filter_storage.includeUpdateAll(update_site_dbs=False) - assert len(filter_storage.include_filters["mutes"]) == 1 - assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") - assert filter_storage.isSiteblocked(site.address) - - def testIncludeAdd(self, site, filter_storage): - self.createInclude(site) - query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C'" - assert not filter_storage.isSiteblocked(site.address) - assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") - assert site.storage.query(query_num_json).fetchone()["num"] == 2 - - # Add include - filter_storage.includeAdd(site.address, "filters.json") - - assert filter_storage.isSiteblocked(site.address) - assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") - assert site.storage.query(query_num_json).fetchone()["num"] == 0 - - # Remove include - filter_storage.includeRemove(site.address, "filters.json") - - assert not filter_storage.isSiteblocked(site.address) - assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") - assert site.storage.query(query_num_json).fetchone()["num"] == 2 - - def testIncludeChange(self, site, filter_storage): - self.createInclude(site) - filter_storage.includeAdd(site.address, "filters.json") - assert filter_storage.isSiteblocked(site.address) - assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") - - # Add new blocked site - assert not filter_storage.isSiteblocked("1Hello") - - filter_content = site.storage.loadJson("filters.json") - filter_content["siteblocks"]["1Hello"] = {} - site.storage.writeJson("filters.json", filter_content) - - assert filter_storage.isSiteblocked("1Hello") - - # Add new muted user - query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q'" - assert not filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") - assert site.storage.query(query_num_json).fetchone()["num"] == 2 - - filter_content["mutes"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"] = {} - site.storage.writeJson("filters.json", filter_content) - - assert filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") - assert site.storage.query(query_num_json).fetchone()["num"] == 0 - - diff --git a/plugins/ContentFilter/Test/conftest.py b/plugins/ContentFilter/Test/conftest.py deleted file mode 100644 index 634e66e2..00000000 --- a/plugins/ContentFilter/Test/conftest.py +++ /dev/null @@ -1 +0,0 @@ -from src.Test.conftest import * diff --git a/plugins/ContentFilter/Test/pytest.ini b/plugins/ContentFilter/Test/pytest.ini deleted file mode 100644 index d09210d1..00000000 --- a/plugins/ContentFilter/Test/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = Test*.py -addopts = -rsxX -v --durations=6 -markers = - webtest: mark a test as a webtest. 
\ No newline at end of file diff --git a/plugins/ContentFilter/__init__.py b/plugins/ContentFilter/__init__.py deleted file mode 100644 index 4d8c3acc..00000000 --- a/plugins/ContentFilter/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import ContentFilterPlugin diff --git a/plugins/ContentFilter/languages/hu.json b/plugins/ContentFilter/languages/hu.json deleted file mode 100644 index 9b57e697..00000000 --- a/plugins/ContentFilter/languages/hu.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Hide all content from %s?": "%s tartalmaniak elrejtése?", - "Mute": "Elnémítás", - "Unmute %s?": "%s tartalmaniak megjelenítése?", - "Unmute": "Némítás visszavonása" -} diff --git a/plugins/ContentFilter/languages/it.json b/plugins/ContentFilter/languages/it.json deleted file mode 100644 index 9a2c6761..00000000 --- a/plugins/ContentFilter/languages/it.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Hide all content from %s?": "%s Vuoi nascondere i contenuti di questo utente ?", - "Mute": "Attiva Silenzia", - "Unmute %s?": "%s Vuoi mostrare i contenuti di questo utente ?", - "Unmute": "Disattiva Silenzia" -} diff --git a/plugins/ContentFilter/languages/pt-br.json b/plugins/ContentFilter/languages/pt-br.json deleted file mode 100644 index 3c6bfbdc..00000000 --- a/plugins/ContentFilter/languages/pt-br.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Hide all content from %s?": "%s Ocultar todo o conteúdo de ?", - "Mute": "Ativar o Silêncio", - "Unmute %s?": "%s Você quer mostrar o conteúdo deste usuário ?", - "Unmute": "Desligar o silêncio" -} diff --git a/plugins/ContentFilter/languages/zh-tw.json b/plugins/ContentFilter/languages/zh-tw.json deleted file mode 100644 index 0995f3a0..00000000 --- a/plugins/ContentFilter/languages/zh-tw.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Hide all content from %s?": "屏蔽 %s 的所有內容?", - "Mute": "屏蔽", - "Unmute %s?": "對 %s 解除屏蔽?", - "Unmute": "解除屏蔽" -} diff --git a/plugins/ContentFilter/languages/zh.json b/plugins/ContentFilter/languages/zh.json deleted file mode 100644 index bf63f107..00000000 --- a/plugins/ContentFilter/languages/zh.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Hide all content from %s?": "屏蔽 %s 的所有内容?", - "Mute": "屏蔽", - "Unmute %s?": "对 %s 解除屏蔽?", - "Unmute": "解除屏蔽" -} diff --git a/plugins/ContentFilter/media/blocklisted.html b/plugins/ContentFilter/media/blocklisted.html deleted file mode 100644 index 9a287b72..00000000 --- a/plugins/ContentFilter/media/blocklisted.html +++ /dev/null @@ -1,107 +0,0 @@ - - - - - -
- [blocklisted.html body text only; markup, styles and scripts of the deleted file are not recoverable in this view: "Site blocked" / "This site is on your blocklist:" / reason placeholder "Too much image" / date placeholder "on 2015-01-25 12:32:11"]
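For context on the page text above (this note and the sketch are not part of the diff): the deleted UiRequestPlugin in ContentFilterPlugin.py serves this blocklisted page whenever the requested address, or "0x" plus the sha256 hex digest of that address, appears in the site blocklist. A minimal Python 3 flavoured sketch of that lookup, with a purely hypothetical blocklist entry, might look like this:

import hashlib

# Hypothetical blocklist, keyed either by a plain site address or by
# "0x" + sha256(address), mirroring the check in the deleted plugin above.
siteblocks = {
    "1BlockedSiteXXXXXXXXXXXXXXXXXXXXXX": {"reason": "Too much image"},
}

def is_siteblocked(address, blocklist=siteblocks):
    # Check the literal address first, then its hashed form.
    hashed = "0x" + hashlib.sha256(address.encode("utf8")).hexdigest()
    return address in blocklist or hashed in blocklist

print(is_siteblocked("1BlockedSiteXXXXXXXXXXXXXXXXXXXXXX"))  # True for the example entry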
- - - - - - diff --git a/plugins/ContentFilter/media/js/ZeroFrame.js b/plugins/ContentFilter/media/js/ZeroFrame.js deleted file mode 100644 index d6facdbf..00000000 --- a/plugins/ContentFilter/media/js/ZeroFrame.js +++ /dev/null @@ -1,119 +0,0 @@ -// Version 1.0.0 - Initial release -// Version 1.1.0 (2017-08-02) - Added cmdp function that returns promise instead of using callback -// Version 1.2.0 (2017-08-02) - Added Ajax monkey patch to emulate XMLHttpRequest over ZeroFrame API - -const CMD_INNER_READY = 'innerReady' -const CMD_RESPONSE = 'response' -const CMD_WRAPPER_READY = 'wrapperReady' -const CMD_PING = 'ping' -const CMD_PONG = 'pong' -const CMD_WRAPPER_OPENED_WEBSOCKET = 'wrapperOpenedWebsocket' -const CMD_WRAPPER_CLOSE_WEBSOCKET = 'wrapperClosedWebsocket' - -class ZeroFrame { - constructor(url) { - this.url = url - this.waiting_cb = {} - this.wrapper_nonce = document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1") - this.connect() - this.next_message_id = 1 - this.init() - } - - init() { - return this - } - - connect() { - this.target = window.parent - window.addEventListener('message', e => this.onMessage(e), false) - this.cmd(CMD_INNER_READY) - } - - onMessage(e) { - let message = e.data - let cmd = message.cmd - if (cmd === CMD_RESPONSE) { - if (this.waiting_cb[message.to] !== undefined) { - this.waiting_cb[message.to](message.result) - } - else { - this.log("Websocket callback not found:", message) - } - } else if (cmd === CMD_WRAPPER_READY) { - this.cmd(CMD_INNER_READY) - } else if (cmd === CMD_PING) { - this.response(message.id, CMD_PONG) - } else if (cmd === CMD_WRAPPER_OPENED_WEBSOCKET) { - this.onOpenWebsocket() - } else if (cmd === CMD_WRAPPER_CLOSE_WEBSOCKET) { - this.onCloseWebsocket() - } else { - this.onRequest(cmd, message) - } - } - - onRequest(cmd, message) { - this.log("Unknown request", message) - } - - response(to, result) { - this.send({ - cmd: CMD_RESPONSE, - to: to, - result: result - }) - } - - cmd(cmd, params={}, cb=null) { - this.send({ - cmd: cmd, - params: params - }, cb) - } - - cmdp(cmd, params={}) { - return new Promise((resolve, reject) => { - this.cmd(cmd, params, (res) => { - if (res && res.error) { - reject(res.error) - } else { - resolve(res) - } - }) - }) - } - - send(message, cb=null) { - message.wrapper_nonce = this.wrapper_nonce - message.id = this.next_message_id - this.next_message_id++ - this.target.postMessage(message, '*') - if (cb) { - this.waiting_cb[message.id] = cb - } - } - - log(...args) { - console.log.apply(console, ['[ZeroFrame]'].concat(args)) - } - - onOpenWebsocket() { - this.log('Websocket open') - } - - onCloseWebsocket() { - this.log('Websocket close') - } - - monkeyPatchAjax() { - var page = this - XMLHttpRequest.prototype.realOpen = XMLHttpRequest.prototype.open - this.cmd("wrapperGetAjaxKey", [], (res) => { this.ajax_key = res }) - var newOpen = function (method, url, async) { - url += "?ajax_key=" + page.ajax_key - return this.realOpen(method, url, async) - } - XMLHttpRequest.prototype.open = newOpen - } -} diff --git a/plugins/Cors/CorsPlugin.py b/plugins/Cors/CorsPlugin.py deleted file mode 100644 index 8d758988..00000000 --- a/plugins/Cors/CorsPlugin.py +++ /dev/null @@ -1,104 +0,0 @@ -import re -import cgi -import copy - -from Plugin import PluginManager -from Translate import Translate -if "_" not in locals(): - _ = Translate("plugins/Cors/languages/") - - -def getCorsPath(site, inner_path): - match = re.match("^cors-([A-Za-z0-9]{26,35})/(.*)", inner_path) - if not match: - raise 
Exception("Invalid cors path: %s" % inner_path) - cors_address = match.group(1) - cors_inner_path = match.group(2) - - if not "Cors:%s" % cors_address in site.settings["permissions"]: - raise Exception("This site has no permission to access site %s" % cors_address) - - return cors_address, cors_inner_path - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def hasSitePermission(self, address, cmd=None): - if super(UiWebsocketPlugin, self).hasSitePermission(address, cmd=cmd): - return True - - if not "Cors:%s" % address in self.site.settings["permissions"] or cmd not in ["fileGet", "fileList", "dirList", "fileRules", "optionalFileInfo", "fileQuery", "dbQuery", "userGetSettings", "siteInfo"]: - return False - else: - return True - - # Add cors support for file commands - def corsFuncWrapper(self, func_name, to, inner_path, *args, **kwargs): - if inner_path.startswith("cors-"): - cors_address, cors_inner_path = getCorsPath(self.site, inner_path) - - req_self = copy.copy(self) - req_self.site = self.server.sites.get(cors_address) # Change the site to the merged one - if not req_self.site: - return {"error": "No site found"} - - func = getattr(super(UiWebsocketPlugin, req_self), func_name) - back = func(to, cors_inner_path, *args, **kwargs) - return back - else: - func = getattr(super(UiWebsocketPlugin, self), func_name) - return func(to, inner_path, *args, **kwargs) - - def actionFileGet(self, to, inner_path, *args, **kwargs): - return self.corsFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs) - - def actionFileList(self, to, inner_path, *args, **kwargs): - return self.corsFuncWrapper("actionFileList", to, inner_path, *args, **kwargs) - - def actionDirList(self, to, inner_path, *args, **kwargs): - return self.corsFuncWrapper("actionDirList", to, inner_path, *args, **kwargs) - - def actionFileRules(self, to, inner_path, *args, **kwargs): - return self.corsFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs) - - def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs): - return self.corsFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs) - - def actionCorsPermission(self, to, address): - site = self.server.sites.get(address) - if site: - site_name = site.content_manager.contents.get("content.json", {}).get("title") - button_title = _["Grant"] - else: - site_name = address - button_title = _["Grant & Add"] - - if site and "Cors:" + address in self.permissions: - return "ignored" - - self.cmd( - "confirm", - [_["This site requests read permission to: %s"] % cgi.escape(site_name), button_title], - lambda (res): self.cbCorsPermission(to, address) - ) - - def cbCorsPermission(self, to, address): - self.actionPermissionAdd(to, "Cors:" + address) - site = self.server.sites.get(address) - if not site: - self.server.site_manager.need(address) - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - # Allow to load cross origin files using /cors-address/file.jpg - def parsePath(self, path): - path_parts = super(UiRequestPlugin, self).parsePath(path) - if "cors-" not in path: # Optimization - return path_parts - site = self.server.sites[path_parts["address"]] - try: - path_parts["address"], path_parts["inner_path"] = getCorsPath(site, path_parts["inner_path"]) - except: - return None - return path_parts diff --git a/plugins/Cors/__init__.py b/plugins/Cors/__init__.py deleted file mode 100644 index bca1ab3e..00000000 --- a/plugins/Cors/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import CorsPlugin \ No newline 
at end of file diff --git a/plugins/CryptMessage/CryptMessage.py b/plugins/CryptMessage/CryptMessage.py deleted file mode 100644 index 955dd9b1..00000000 --- a/plugins/CryptMessage/CryptMessage.py +++ /dev/null @@ -1,53 +0,0 @@ -from lib.pybitcointools import bitcoin as btctools -import hashlib - -ecc_cache = {} - - -def encrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'): - from lib import pyelliptic - curve, pubkey_x, pubkey_y, i = pyelliptic.ECC._decode_pubkey(pubkey) - if ephemcurve is None: - ephemcurve = curve - ephem = pyelliptic.ECC(curve=ephemcurve) - key = hashlib.sha512(ephem.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() - key_e, key_m = key[:32], key[32:] - pubkey = ephem.get_pubkey() - iv = pyelliptic.OpenSSL.rand(pyelliptic.OpenSSL.get_cipher(ciphername).get_blocksize()) - ctx = pyelliptic.Cipher(key_e, iv, 1, ciphername) - ciphertext = iv + pubkey + ctx.ciphering(data) - mac = pyelliptic.hmac_sha256(key_m, ciphertext) - return key_e, ciphertext + mac - - -def split(encrypted): - iv = encrypted[0:16] - ciphertext = encrypted[16+70:-32] - - return iv, ciphertext - - -def getEcc(privatekey=None): - from lib import pyelliptic - global eccs - if privatekey not in ecc_cache: - if privatekey: - publickey_bin = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin") - publickey_openssl = toOpensslPublickey(publickey_bin) - privatekey_openssl = toOpensslPrivatekey(privatekey) - ecc_cache[privatekey] = pyelliptic.ECC(curve='secp256k1', privkey=privatekey_openssl, pubkey=publickey_openssl) - else: - ecc_cache[None] = pyelliptic.ECC() - return ecc_cache[privatekey] - - -def toOpensslPrivatekey(privatekey): - privatekey_bin = btctools.encode_privkey(privatekey, "bin") - return '\x02\xca\x00\x20' + privatekey_bin - - -def toOpensslPublickey(publickey): - publickey_bin = btctools.encode_pubkey(publickey, "bin") - publickey_bin = publickey_bin[1:] - publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:] - return publickey_openssl diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py deleted file mode 100644 index 71499eca..00000000 --- a/plugins/CryptMessage/CryptMessagePlugin.py +++ /dev/null @@ -1,149 +0,0 @@ -import base64 -import os - -from Plugin import PluginManager -from Crypt import CryptBitcoin -from lib.pybitcointools import bitcoin as btctools - -import CryptMessage - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def encrypt(self, text, publickey): - encrypted = CryptMessage.encrypt(text, CryptMessage.toOpensslPublickey(publickey)) - return encrypted - - def decrypt(self, encrypted, privatekey): - back = CryptMessage.getEcc(privatekey).decrypt(encrypted) - return back.decode("utf8") - - # - Actions - - - # Returns user's public key unique to site - # Return: Public key - def actionUserPublickey(self, to, index=0): - publickey = self.user.getEncryptPublickey(self.site.address, index) - self.response(to, publickey) - - # Encrypt a text using the publickey or user's sites unique publickey - # Return: Encrypted text using base64 encoding - def actionEciesEncrypt(self, to, text, publickey=0, return_aes_key=False): - if type(publickey) is int: # Encrypt using user's publickey - publickey = self.user.getEncryptPublickey(self.site.address, publickey) - aes_key, encrypted = self.encrypt(text.encode("utf8"), publickey.decode("base64")) - if return_aes_key: - self.response(to, [base64.b64encode(encrypted), base64.b64encode(aes_key)]) - else: - 
self.response(to, base64.b64encode(encrypted)) - - # Decrypt a text using privatekey or the user's site unique private key - # Return: Decrypted text or list of decrypted texts - def actionEciesDecrypt(self, to, param, privatekey=0): - if type(privatekey) is int: # Decrypt using user's privatekey - privatekey = self.user.getEncryptPrivatekey(self.site.address, privatekey) - - if type(param) == list: - encrypted_texts = param - else: - encrypted_texts = [param] - - texts = [] # Decoded texts - for encrypted_text in encrypted_texts: - try: - text = self.decrypt(encrypted_text.decode("base64"), privatekey) - texts.append(text) - except Exception as err: - texts.append(None) - - if type(param) == list: - self.response(to, texts) - else: - self.response(to, texts[0]) - - # Encrypt a text using AES - # Return: Iv, AES key, Encrypted text - def actionAesEncrypt(self, to, text, key=None, iv=None): - from lib import pyelliptic - - if key: - key = key.decode("base64") - else: - key = os.urandom(32) - - if iv: # Generate new AES key if not definied - iv = iv.decode("base64") - else: - iv = pyelliptic.Cipher.gen_IV('aes-256-cbc') - - if text: - encrypted = pyelliptic.Cipher(key, iv, 1, ciphername='aes-256-cbc').ciphering(text.encode("utf8")) - else: - encrypted = "" - - self.response(to, [base64.b64encode(key), base64.b64encode(iv), base64.b64encode(encrypted)]) - - # Decrypt a text using AES - # Return: Decrypted text - def actionAesDecrypt(self, to, *args): - from lib import pyelliptic - - if len(args) == 3: # Single decrypt - encrypted_texts = [(args[0], args[1])] - keys = [args[2]] - else: # Batch decrypt - encrypted_texts, keys = args - - texts = [] # Decoded texts - for iv, encrypted_text in encrypted_texts: - encrypted_text = encrypted_text.decode("base64") - iv = iv.decode("base64") - text = None - for key in keys: - ctx = pyelliptic.Cipher(key.decode("base64"), iv, 0, ciphername='aes-256-cbc') - try: - decrypted = ctx.ciphering(encrypted_text) - if decrypted and decrypted.decode("utf8"): # Valid text decoded - text = decrypted - except Exception, err: - pass - texts.append(text) - - if len(args) == 3: - self.response(to, texts[0]) - else: - self.response(to, texts) - - -@PluginManager.registerTo("User") -class UserPlugin(object): - def getEncryptPrivatekey(self, address, param_index=0): - assert param_index >= 0 and param_index <= 1000 - site_data = self.getSiteData(address) - - if site_data.get("cert"): # Different privatekey for different cert provider - index = param_index + self.getAddressAuthIndex(site_data["cert"]) - else: - index = param_index - - if "encrypt_privatekey_%s" % index not in site_data: - address_index = self.getAddressAuthIndex(address) - crypt_index = address_index + 1000 + index - site_data["encrypt_privatekey_%s" % index] = CryptBitcoin.hdPrivatekey(self.master_seed, crypt_index) - self.log.debug("New encrypt privatekey generated for %s:%s" % (address, index)) - return site_data["encrypt_privatekey_%s" % index] - - def getEncryptPublickey(self, address, param_index=0): - assert param_index >= 0 and param_index <= 1000 - site_data = self.getSiteData(address) - - if site_data.get("cert"): # Different privatekey for different cert provider - index = param_index + self.getAddressAuthIndex(site_data["cert"]) - else: - index = param_index - - if "encrypt_publickey_%s" % index not in site_data: - privatekey = self.getEncryptPrivatekey(address, param_index) - publickey = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin_compressed") - site_data["encrypt_publickey_%s" 
% index] = base64.b64encode(publickey) - return site_data["encrypt_publickey_%s" % index] diff --git a/plugins/CryptMessage/Test/TestCrypt.py b/plugins/CryptMessage/Test/TestCrypt.py deleted file mode 100644 index f3098a76..00000000 --- a/plugins/CryptMessage/Test/TestCrypt.py +++ /dev/null @@ -1,109 +0,0 @@ -import pytest -from CryptMessage import CryptMessage - -@pytest.mark.usefixtures("resetSettings") -class TestCrypt: - def testPublickey(self, ui_websocket): - pub = ui_websocket.testAction("UserPublickey", 0) - assert len(pub) == 44 # Compressed, b64 encoded publickey - - # Different pubkey for specificed index - assert ui_websocket.testAction("UserPublickey", 1) != ui_websocket.testAction("UserPublickey", 0) - - # Same publickey for same index - assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2) - - # Different publickey for different cert - site_data = ui_websocket.user.getSiteData(ui_websocket.site.address) - site_data["cert"] = None - pub1 = ui_websocket.testAction("UserPublickey", 0) - - site_data = ui_websocket.user.getSiteData(ui_websocket.site.address) - site_data["cert"] = "zeroid.bit" - pub2 = ui_websocket.testAction("UserPublickey", 0) - assert pub1 != pub2 - - - - def testEcies(self, ui_websocket): - ui_websocket.actionUserPublickey(0, 0) - pub = ui_websocket.ws.result - - ui_websocket.actionEciesEncrypt(0, "hello", pub) - encrypted = ui_websocket.ws.result - assert len(encrypted) == 180 - - # Don't allow decrypt using other privatekey index - ui_websocket.actionEciesDecrypt(0, encrypted, 123) - decrypted = ui_websocket.ws.result - assert decrypted != "hello" - - # Decrypt using correct privatekey - ui_websocket.actionEciesDecrypt(0, encrypted) - decrypted = ui_websocket.ws.result - assert decrypted == "hello" - - # Decrypt batch - ui_websocket.actionEciesDecrypt(0, [encrypted, "baad", encrypted]) - decrypted = ui_websocket.ws.result - assert decrypted == ["hello", None, "hello"] - - - def testEciesUtf8(self, ui_websocket): - # Utf8 test - utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9p' - ui_websocket.actionEciesEncrypt(0, utf8_text) - encrypted = ui_websocket.ws.result - - ui_websocket.actionEciesDecrypt(0, encrypted) - assert ui_websocket.ws.result == utf8_text - - - def testEciesAes(self, ui_websocket): - ui_websocket.actionEciesEncrypt(0, "hello", return_aes_key=True) - ecies_encrypted, aes_key = ui_websocket.ws.result - - # Decrypt using Ecies - ui_websocket.actionEciesDecrypt(0, ecies_encrypted) - assert ui_websocket.ws.result == "hello" - - # Decrypt using AES - aes_iv, aes_encrypted = CryptMessage.split(ecies_encrypted.decode("base64")) - - ui_websocket.actionAesDecrypt(0, aes_iv.encode("base64"), aes_encrypted.encode("base64"), aes_key) - assert ui_websocket.ws.result == "hello" - - - def testAes(self, ui_websocket): - ui_websocket.actionAesEncrypt(0, "hello") - key, iv, encrypted = ui_websocket.ws.result - - assert len(key) == 44 - assert len(iv) == 24 - assert len(encrypted) == 24 - - # Single decrypt - ui_websocket.actionAesDecrypt(0, iv, encrypted, key) - assert ui_websocket.ws.result == "hello" - - # Batch decrypt - ui_websocket.actionAesEncrypt(0, "hello") - key2, iv2, encrypted2 = ui_websocket.ws.result - - assert [key, iv, encrypted] != [key2, iv2, encrypted2] - - # 2 correct key - ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key]) - assert ui_websocket.ws.result == ["hello", "hello", None, None] - - # 3 key - 
ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key, key2]) - assert ui_websocket.ws.result == ["hello", "hello", None, "hello"] - - def testAesUtf8(self, ui_websocket): - utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9' - ui_websocket.actionAesEncrypt(0, utf8_text) - key, iv, encrypted = ui_websocket.ws.result - - ui_websocket.actionAesDecrypt(0, iv, encrypted, key) - assert ui_websocket.ws.result == utf8_text diff --git a/plugins/CryptMessage/Test/conftest.py b/plugins/CryptMessage/Test/conftest.py deleted file mode 100644 index 8c1df5b2..00000000 --- a/plugins/CryptMessage/Test/conftest.py +++ /dev/null @@ -1 +0,0 @@ -from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/CryptMessage/Test/pytest.ini b/plugins/CryptMessage/Test/pytest.ini deleted file mode 100644 index d09210d1..00000000 --- a/plugins/CryptMessage/Test/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = Test*.py -addopts = -rsxX -v --durations=6 -markers = - webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/CryptMessage/__init__.py b/plugins/CryptMessage/__init__.py deleted file mode 100644 index 3eb41820..00000000 --- a/plugins/CryptMessage/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import CryptMessagePlugin \ No newline at end of file diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py deleted file mode 100644 index 8d662bba..00000000 --- a/plugins/FilePack/FilePackPlugin.py +++ /dev/null @@ -1,194 +0,0 @@ -import os -import re - -import gevent - -from Plugin import PluginManager -from Config import config -from Debug import Debug - - -# Keep archive open for faster reponse times for large sites -archive_cache = {} - - -def closeArchive(archive_path): - if archive_path in archive_cache: - del archive_cache[archive_path] - - -def openArchive(archive_path, file_obj=None): - if archive_path not in archive_cache: - if archive_path.endswith("tar.gz"): - import tarfile - archive_cache[archive_path] = tarfile.open(file_obj or archive_path, "r:gz") - elif archive_path.endswith("tar.bz2"): - import tarfile - archive_cache[archive_path] = tarfile.open(file_obj or archive_path, "r:bz2") - else: - import zipfile - archive_cache[archive_path] = zipfile.ZipFile(file_obj or archive_path) - gevent.spawn_later(5, lambda: closeArchive(archive_path)) # Close after 5 sec - - archive = archive_cache[archive_path] - return archive - - -def openArchiveFile(archive_path, path_within, file_obj=None): - archive = openArchive(archive_path, file_obj=file_obj) - if archive_path.endswith(".zip"): - return archive.open(path_within) - else: - return archive.extractfile(path_within.encode("utf8")) - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - def actionSiteMedia(self, path, **kwargs): - if ".zip/" in path or ".tar.gz/" in path: - file_obj = None - path_parts = self.parsePath(path) - file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8")) - match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path) - archive_path, path_within = match.groups() - if archive_path not in archive_cache: - site = self.server.site_manager.get(path_parts["address"]) - if not site: - return self.actionSiteAddPrompt(path) - archive_inner_path = site.storage.getInnerPath(archive_path) - if not os.path.isfile(archive_path): - # Wait until file downloads - result = site.needFile(archive_inner_path, priority=10) - # Send 
virutal file path download finished event to remove loading screen - site.updateWebsocket(file_done=archive_inner_path) - if not result: - return self.error404(archive_inner_path) - file_obj = site.storage.openBigfile(archive_inner_path) - - header_allow_ajax = False - if self.get.get("ajax_key"): - requester_site = self.server.site_manager.get(path_parts["request_address"]) - if self.get["ajax_key"] == requester_site.settings["ajax_key"]: - header_allow_ajax = True - else: - return self.error403("Invalid ajax_key") - - try: - file = openArchiveFile(archive_path, path_within, file_obj=file_obj) - content_type = self.getContentType(file_path) - self.sendHeader(200, content_type=content_type, noscript=kwargs.get("header_noscript", False), allow_ajax=header_allow_ajax) - return self.streamFile(file) - except Exception as err: - self.log.debug("Error opening archive file: %s" % Debug.formatException(err)) - return self.error404(path) - - return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs) - - def streamFile(self, file): - for i in range(100): # Read max 6MB - try: - block = file.read(60 * 1024) - if block: - yield block - else: - raise StopIteration - except StopIteration: - file.close() - break - - -@PluginManager.registerTo("SiteStorage") -class SiteStoragePlugin(object): - def isFile(self, inner_path): - if ".zip/" in inner_path or ".tar.gz/" in inner_path: - match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", inner_path) - archive_inner_path, path_within = match.groups() - return super(SiteStoragePlugin, self).isFile(archive_inner_path) - else: - return super(SiteStoragePlugin, self).isFile(inner_path) - - def openArchive(self, inner_path): - archive_path = self.getPath(inner_path) - file_obj = None - if archive_path not in archive_cache: - if not os.path.isfile(archive_path): - result = self.site.needFile(inner_path, priority=10) - self.site.updateWebsocket(file_done=inner_path) - if not result: - raise Exception("Unable to download file") - file_obj = self.site.storage.openBigfile(inner_path) - - try: - archive = openArchive(archive_path, file_obj=file_obj) - except Exception as err: - raise Exception("Unable to download file: %s" % err) - - return archive - - def walk(self, inner_path, *args, **kwags): - if ".zip" in inner_path or ".tar.gz" in inner_path: - match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) - archive_inner_path, path_within = match.groups() - archive = self.openArchive(archive_inner_path) - path_within = path_within.lstrip("/") - - if archive_inner_path.endswith(".zip"): - namelist = [name for name in archive.namelist() if not name.endswith("/")] - else: - namelist = [item.name for item in archive.getmembers() if not item.isdir()] - - namelist_relative = [] - for name in namelist: - if not name.startswith(path_within): - continue - name_relative = name.replace(path_within, "", 1).rstrip("/") - namelist_relative.append(name_relative) - - return namelist_relative - - else: - return super(SiteStoragePlugin, self).walk(inner_path, *args, **kwags) - - def list(self, inner_path, *args, **kwags): - if ".zip" in inner_path or ".tar.gz" in inner_path: - match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) - archive_inner_path, path_within = match.groups() - archive = self.openArchive(archive_inner_path) - path_within = path_within.lstrip("/") - - if archive_inner_path.endswith(".zip"): - namelist = [name for name in archive.namelist()] - else: - namelist = [item.name for item in archive.getmembers()] - - namelist_relative = [] - for 
name in namelist: - if not name.startswith(path_within): - continue - name_relative = name.replace(path_within, "", 1).rstrip("/") - - if "/" in name_relative: # File is in sub-directory - continue - - namelist_relative.append(name_relative) - return namelist_relative - - else: - return super(SiteStoragePlugin, self).list(inner_path, *args, **kwags) - - def read(self, inner_path, mode="r"): - if ".zip/" in inner_path or ".tar.gz/" in inner_path: - match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) - archive_inner_path, path_within = match.groups() - archive = self.openArchive(archive_inner_path) - path_within = path_within.lstrip("/") - print archive, archive_inner_path - - if archive_inner_path.endswith(".zip"): - return archive.open(path_within).read() - else: - return archive.extractfile(path_within.encode("utf8")).read() - - else: - return super(SiteStoragePlugin, self).read(inner_path, mode) - diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py deleted file mode 100644 index ab07a1ff..00000000 --- a/plugins/FilePack/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import FilePackPlugin \ No newline at end of file diff --git a/plugins/MergerSite/MergerSitePlugin.py b/plugins/MergerSite/MergerSitePlugin.py deleted file mode 100644 index 3de92a91..00000000 --- a/plugins/MergerSite/MergerSitePlugin.py +++ /dev/null @@ -1,384 +0,0 @@ -import re -import time -import copy - -from Plugin import PluginManager -from Translate import Translate -from util import RateLimit -from util import helper -from Debug import Debug -try: - import OptionalManager.UiWebsocketPlugin # To make optioanlFileInfo merger sites compatible -except Exception: - pass - -if "merger_db" not in locals().keys(): # To keep merger_sites between module reloads - merger_db = {} # Sites that allowed to list other sites {address: [type1, type2...]} - merged_db = {} # Sites that allowed to be merged to other sites {address: type, ...} - merged_to_merger = {} # {address: [site1, site2, ...]} cache - site_manager = None # Site manager for merger sites - -if "_" not in locals(): - _ = Translate("plugins/MergerSite/languages/") - - -# Check if the site has permission to this merger site -def checkMergerPath(address, inner_path): - merged_match = re.match("^merged-(.*?)/([A-Za-z0-9]{26,35})/", inner_path) - if merged_match: - merger_type = merged_match.group(1) - # Check if merged site is allowed to include other sites - if merger_type in merger_db.get(address, []): - # Check if included site allows to include - merged_address = merged_match.group(2) - if merged_db.get(merged_address) == merger_type: - inner_path = re.sub("^merged-(.*?)/([A-Za-z0-9]{26,35})/", "", inner_path) - return merged_address, inner_path - else: - raise Exception( - "Merger site (%s) does not have permission for merged site: %s (%s)" % - (merger_type, merged_address, merged_db.get(merged_address)) - ) - else: - raise Exception("No merger (%s) permission to load:
%s (%s not in %s)" % ( - address, inner_path, merger_type, merger_db.get(address, [])) - ) - else: - raise Exception("Invalid merger path: %s" % inner_path) - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - # Download new site - def actionMergerSiteAdd(self, to, addresses): - if type(addresses) != list: - # Single site add - addresses = [addresses] - # Check if the site has merger permission - merger_types = merger_db.get(self.site.address) - if not merger_types: - return self.response(to, {"error": "Not a merger site"}) - - if RateLimit.isAllowed(self.site.address + "-MergerSiteAdd", 10) and len(addresses) == 1: - # Without confirmation if only one site address and not called in last 10 sec - self.cbMergerSiteAdd(to, addresses) - else: - self.cmd( - "confirm", - [_["Add %s new site?"] % len(addresses), "Add"], - lambda (res): self.cbMergerSiteAdd(to, addresses) - ) - self.response(to, "ok") - - # Callback of adding new site confirmation - def cbMergerSiteAdd(self, to, addresses): - added = 0 - for address in addresses: - added += 1 - site_manager.need(address) - if added: - self.cmd("notification", ["done", _["Added %s new site"] % added, 5000]) - RateLimit.called(self.site.address + "-MergerSiteAdd") - site_manager.updateMergerSites() - - # Delete a merged site - def actionMergerSiteDelete(self, to, address): - site = self.server.sites.get(address) - if not site: - return self.response(to, {"error": "No site found: %s" % address}) - - merger_types = merger_db.get(self.site.address) - if not merger_types: - return self.response(to, {"error": "Not a merger site"}) - if merged_db.get(address) not in merger_types: - return self.response(to, {"error": "Merged type (%s) not in %s" % (merged_db.get(address), merger_types)}) - - self.cmd("notification", ["done", _["Site deleted: %s"] % address, 5000]) - self.response(to, "ok") - - # Lists merged sites - def actionMergerSiteList(self, to, query_site_info=False): - merger_types = merger_db.get(self.site.address) - ret = {} - if not merger_types: - return self.response(to, {"error": "Not a merger site"}) - for address, merged_type in merged_db.iteritems(): - if merged_type not in merger_types: - continue # Site not for us - if query_site_info: - site = self.server.sites.get(address) - ret[address] = self.formatSiteInfo(site, create_user=False) - else: - ret[address] = merged_type - self.response(to, ret) - - def hasSitePermission(self, address, *args, **kwargs): - if super(UiWebsocketPlugin, self).hasSitePermission(address, *args, **kwargs): - return True - else: - if self.site.address in [merger_site.address for merger_site in merged_to_merger.get(address, [])]: - return True - else: - return False - - # Add support merger sites for file commands - def mergerFuncWrapper(self, func_name, to, inner_path, *args, **kwargs): - if inner_path.startswith("merged-"): - merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) - - # Set the same cert for merged site - merger_cert = self.user.getSiteData(self.site.address).get("cert") - if merger_cert and self.user.getSiteData(merged_address).get("cert") != merger_cert: - self.user.setCert(merged_address, merger_cert) - - req_self = copy.copy(self) - req_self.site = self.server.sites.get(merged_address) # Change the site to the merged one - - func = getattr(super(UiWebsocketPlugin, req_self), func_name) - return func(to, merged_inner_path, *args, **kwargs) - else: - func = getattr(super(UiWebsocketPlugin, self), func_name) - return func(to, inner_path, 
*args, **kwargs) - - def actionFileList(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionFileList", to, inner_path, *args, **kwargs) - - def actionDirList(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionDirList", to, inner_path, *args, **kwargs) - - def actionFileGet(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs) - - def actionFileWrite(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionFileWrite", to, inner_path, *args, **kwargs) - - def actionFileDelete(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionFileDelete", to, inner_path, *args, **kwargs) - - def actionFileRules(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs) - - def actionFileNeed(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionFileNeed", to, inner_path, *args, **kwargs) - - def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs) - - def actionOptionalFileDelete(self, to, inner_path, *args, **kwargs): - return self.mergerFuncWrapper("actionOptionalFileDelete", to, inner_path, *args, **kwargs) - - def actionBigfileUploadInit(self, to, inner_path, *args, **kwargs): - back = self.mergerFuncWrapper("actionBigfileUploadInit", to, inner_path, *args, **kwargs) - if inner_path.startswith("merged-"): - merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) - back["inner_path"] = "merged-%s/%s/%s" % (merged_db[merged_address], merged_address, back["inner_path"]) - return back - - # Add support merger sites for file commands with privatekey parameter - def mergerFuncWrapperWithPrivatekey(self, func_name, to, privatekey, inner_path, *args, **kwargs): - func = getattr(super(UiWebsocketPlugin, self), func_name) - if inner_path.startswith("merged-"): - merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) - merged_site = self.server.sites.get(merged_address) - - # Set the same cert for merged site - merger_cert = self.user.getSiteData(self.site.address).get("cert") - if merger_cert: - self.user.setCert(merged_address, merger_cert) - - site_before = self.site # Save to be able to change it back after we ran the command - self.site = merged_site # Change the site to the merged one - try: - back = func(to, privatekey, merged_inner_path, *args, **kwargs) - finally: - self.site = site_before # Change back to original site - return back - else: - return func(to, privatekey, inner_path, *args, **kwargs) - - def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs): - return self.mergerFuncWrapperWithPrivatekey("actionSiteSign", to, privatekey, inner_path, *args, **kwargs) - - def actionSitePublish(self, to, privatekey=None, inner_path="content.json", *args, **kwargs): - return self.mergerFuncWrapperWithPrivatekey("actionSitePublish", to, privatekey, inner_path, *args, **kwargs) - - def actionPermissionAdd(self, to, permission): - super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission) - if permission.startswith("Merger"): - self.site.storage.rebuildDb() - - def actionPermissionDetails(self, to, permission): - if not permission.startswith("Merger"): - return super(UiWebsocketPlugin, self).actionPermissionDetails(to, permission) - - merger_type = 
permission.replace("Merger:", "") - if not re.match("^[A-Za-z0-9-]+$", merger_type): - raise Exception("Invalid merger_type: %s" % merger_type) - merged_sites = [] - for address, merged_type in merged_db.iteritems(): - if merged_type != merger_type: - continue - site = self.server.sites.get(address) - try: - merged_sites.append(site.content_manager.contents.get("content.json").get("title", address)) - except Exception as err: - merged_sites.append(address) - - details = _["Read and write permissions to sites with merged type of %s "] % merger_type - details += _["(%s sites)"] % len(merged_sites) - details += "
%s
" % ", ".join(merged_sites) - self.response(to, details) - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - # Allow to load merged site files using /merged-ZeroMe/address/file.jpg - def parsePath(self, path): - path_parts = super(UiRequestPlugin, self).parsePath(path) - if "merged-" not in path: # Optimization - return path_parts - path_parts["address"], path_parts["inner_path"] = checkMergerPath(path_parts["address"], path_parts["inner_path"]) - return path_parts - - -@PluginManager.registerTo("SiteStorage") -class SiteStoragePlugin(object): - # Also rebuild from merged sites - def getDbFiles(self): - merger_types = merger_db.get(self.site.address) - - # First return the site's own db files - for item in super(SiteStoragePlugin, self).getDbFiles(): - yield item - - # Not a merger site, that's all - if not merger_types: - raise StopIteration - - merged_sites = [ - site_manager.sites[address] - for address, merged_type in merged_db.iteritems() - if merged_type in merger_types - ] - found = 0 - for merged_site in merged_sites: - self.log.debug("Loading merged site: %s" % merged_site) - merged_type = merged_db[merged_site.address] - for content_inner_path, content in merged_site.content_manager.contents.iteritems(): - # content.json file itself - if merged_site.storage.isFile(content_inner_path): # Missing content.json file - merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path) - yield merged_inner_path, merged_site.storage.getPath(content_inner_path) - else: - merged_site.log.error("[MISSING] %s" % content_inner_path) - # Data files in content.json - content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): - if not file_relative_path.endswith(".json"): - continue # We only interesed in json files - file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir - file_inner_path = file_inner_path.strip("/") # Strip leading / - if merged_site.storage.isFile(file_inner_path): - merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path) - yield merged_inner_path, merged_site.storage.getPath(file_inner_path) - else: - merged_site.log.error("[MISSING] %s" % file_inner_path) - found += 1 - if found % 100 == 0: - time.sleep(0.000001) # Context switch to avoid UI block - - # Also notice merger sites on a merged site file change - def onUpdated(self, inner_path, file=None): - super(SiteStoragePlugin, self).onUpdated(inner_path, file) - - merged_type = merged_db.get(self.site.address) - - for merger_site in merged_to_merger.get(self.site.address, []): - if merger_site.address == self.site.address: # Avoid infinite loop - continue - virtual_path = "merged-%s/%s/%s" % (merged_type, self.site.address, inner_path) - if inner_path.endswith(".json"): - if file is not None: - merger_site.storage.onUpdated(virtual_path, file=file) - else: - merger_site.storage.onUpdated(virtual_path, file=self.open(inner_path)) - else: - merger_site.storage.onUpdated(virtual_path) - - -@PluginManager.registerTo("Site") -class SitePlugin(object): - def fileDone(self, inner_path): - super(SitePlugin, self).fileDone(inner_path) - - for merger_site in merged_to_merger.get(self.address, []): - if merger_site.address == self.address: - continue - for ws in merger_site.websockets: - ws.event("siteChanged", self, {"event": ["file_done", inner_path]}) - - def 
fileFailed(self, inner_path): - super(SitePlugin, self).fileFailed(inner_path) - - for merger_site in merged_to_merger.get(self.address, []): - if merger_site.address == self.address: - continue - for ws in merger_site.websockets: - ws.event("siteChanged", self, {"event": ["file_failed", inner_path]}) - - -@PluginManager.registerTo("SiteManager") -class SiteManagerPlugin(object): - # Update merger site for site types - def updateMergerSites(self): - global merger_db, merged_db, merged_to_merger, site_manager - s = time.time() - merger_db = {} - merged_db = {} - merged_to_merger = {} - site_manager = self - if not self.sites: - return - for site in self.sites.itervalues(): - # Update merged sites - try: - merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type") - except Exception, err: - self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err))) - continue - if merged_type: - merged_db[site.address] = merged_type - - # Update merger sites - for permission in site.settings["permissions"]: - if not permission.startswith("Merger:"): - continue - if merged_type: - self.log.error( - "Removing permission %s from %s: Merger and merged at the same time." % - (permission, site.address) - ) - site.settings["permissions"].remove(permission) - continue - merger_type = permission.replace("Merger:", "") - if site.address not in merger_db: - merger_db[site.address] = [] - merger_db[site.address].append(merger_type) - site_manager.sites[site.address] = site - - # Update merged to merger - if merged_type: - for merger_site in self.sites.itervalues(): - if "Merger:" + merged_type in merger_site.settings["permissions"]: - if site.address not in merged_to_merger: - merged_to_merger[site.address] = [] - merged_to_merger[site.address].append(merger_site) - self.log.debug("Updated merger sites in %.3fs" % (time.time() - s)) - - def load(self, *args, **kwags): - super(SiteManagerPlugin, self).load(*args, **kwags) - self.updateMergerSites() - - def save(self, *args, **kwags): - super(SiteManagerPlugin, self).save(*args, **kwags) - self.updateMergerSites() diff --git a/plugins/MergerSite/__init__.py b/plugins/MergerSite/__init__.py deleted file mode 100644 index f1f3412c..00000000 --- a/plugins/MergerSite/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import MergerSitePlugin \ No newline at end of file diff --git a/plugins/MergerSite/languages/es.json b/plugins/MergerSite/languages/es.json deleted file mode 100644 index d554c3a9..00000000 --- a/plugins/MergerSite/languages/es.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "¿Agregar %s nuevo sitio?", - "Added %s new site": "Sitio %s agregado", - "Site deleted: %s": "Sitio removido: %s" -} diff --git a/plugins/MergerSite/languages/fr.json b/plugins/MergerSite/languages/fr.json deleted file mode 100644 index 9d59fde9..00000000 --- a/plugins/MergerSite/languages/fr.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "Ajouter le site %s ?", - "Added %s new site": "Site %s ajouté", - "Site deleted: %s": "Site %s supprimé" -} diff --git a/plugins/MergerSite/languages/hu.json b/plugins/MergerSite/languages/hu.json deleted file mode 100644 index 8e377aaa..00000000 --- a/plugins/MergerSite/languages/hu.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "Új oldal hozzáadása: %s?", - "Added %s new site": "Új oldal hozzáadva: %s", - "Site deleted: %s": "Oldal törölve: %s" -} diff --git a/plugins/MergerSite/languages/it.json b/plugins/MergerSite/languages/it.json deleted file mode 100644 index 
d56c9817..00000000 --- a/plugins/MergerSite/languages/it.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "Aggiungere %s nuovo sito ?", - "Added %s new site": "Sito %s aggiunto", - "Site deleted: %s": "Sito %s eliminato" -} diff --git a/plugins/MergerSite/languages/pt-br.json b/plugins/MergerSite/languages/pt-br.json deleted file mode 100644 index cdc298cb..00000000 --- a/plugins/MergerSite/languages/pt-br.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "Adicionar %s novo site?", - "Added %s new site": "Site %s adicionado", - "Site deleted: %s": "Site removido: %s" -} diff --git a/plugins/MergerSite/languages/tr.json b/plugins/MergerSite/languages/tr.json deleted file mode 100644 index 5afb3942..00000000 --- a/plugins/MergerSite/languages/tr.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "%s sitesi eklensin mi?", - "Added %s new site": "%s sitesi eklendi", - "Site deleted: %s": "%s sitesi silindi" -} diff --git a/plugins/MergerSite/languages/zh-tw.json b/plugins/MergerSite/languages/zh-tw.json deleted file mode 100644 index a0684e63..00000000 --- a/plugins/MergerSite/languages/zh-tw.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "添加新網站: %s?", - "Added %s new site": "已添加到新網站:%s", - "Site deleted: %s": "網站已刪除:%s" -} diff --git a/plugins/MergerSite/languages/zh.json b/plugins/MergerSite/languages/zh.json deleted file mode 100644 index 127044e6..00000000 --- a/plugins/MergerSite/languages/zh.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "Add %s new site?": "添加新站点: %s?", - "Added %s new site": "已添加到新站点:%s", - "Site deleted: %s": "站点已删除:%s" -} diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py deleted file mode 100644 index 4e54fae3..00000000 --- a/plugins/Newsfeed/NewsfeedPlugin.py +++ /dev/null @@ -1,188 +0,0 @@ -import time -import re - -from Plugin import PluginManager -from Db import DbQuery -from Debug import Debug -from util import helper - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def formatSiteInfo(self, site, create_user=True): - site_info = super(UiWebsocketPlugin, self).formatSiteInfo(site, create_user=create_user) - feed_following = self.user.sites.get(site.address, {}).get("follow", None) - if feed_following == None: - site_info["feed_follow_num"] = None - else: - site_info["feed_follow_num"] = len(feed_following) - return site_info - - def actionFeedFollow(self, to, feeds): - self.user.setFeedFollow(self.site.address, feeds) - self.user.save() - self.response(to, "ok") - - def actionFeedListFollow(self, to): - feeds = self.user.sites[self.site.address].get("follow", {}) - self.response(to, feeds) - - def actionFeedQuery(self, to, limit=10, day_limit=3): - if "ADMIN" not in self.site.settings["permissions"]: - return self.response(to, "FeedQuery not allowed") - - from Site import SiteManager - rows = [] - stats = [] - - total_s = time.time() - num_sites = 0 - - for address, site_data in self.user.sites.items(): - feeds = site_data.get("follow") - if not feeds: - continue - if type(feeds) is not dict: - self.log.debug("Invalid feed for site %s" % address) - continue - num_sites += 1 - for name, query_set in feeds.iteritems(): - site = SiteManager.site_manager.get(address) - if not site or not site.storage.has_db: - continue - - s = time.time() - try: - query_raw, params = query_set - query_parts = re.split(r"UNION(?:\s+ALL|)", query_raw) - for i, query_part in enumerate(query_parts): - db_query = DbQuery(query_part) - if day_limit: - where = " WHERE %s > strftime('%%s', 
'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit) - if "WHERE" in query_part: - query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where+" AND (\\1)", query_part) - else: - query_part += where - query_parts[i] = query_part - query = " UNION ".join(query_parts) - - if ":params" in query: - query_params = map(helper.sqlquote, params) - query = query.replace(":params", ",".join(query_params)) - - res = site.storage.query(query + " ORDER BY date_added DESC LIMIT %s" % limit) - - except Exception as err: # Log error - self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err))) - stats.append({"site": site.address, "feed_name": name, "error": str(err)}) - continue - - for row in res: - row = dict(row) - if not isinstance(row["date_added"], (int, long, float, complex)): - self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"])) - continue - if row["date_added"] > 1000000000000: # Formatted as millseconds - row["date_added"] = row["date_added"] / 1000 - if "date_added" not in row or row["date_added"] > time.time() + 120: - self.log.debug("Newsfeed item from the future from from site %s" % address) - continue # Feed item is in the future, skip it - row["site"] = address - row["feed_name"] = name - rows.append(row) - stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)}) - time.sleep(0.0001) - return self.response(to, {"rows": rows, "stats": stats, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3)}) - - def parseSearch(self, search): - parts = re.split("(site|type):", search) - if len(parts) > 1: # Found filter - search_text = parts[0] - parts = [part.strip() for part in parts] - filters = dict(zip(parts[1::2], parts[2::2])) - else: - search_text = search - filters = {} - return [search_text, filters] - - def actionFeedSearch(self, to, search, limit=30, day_limit=30): - if "ADMIN" not in self.site.settings["permissions"]: - return self.response(to, "FeedSearch not allowed") - - from Site import SiteManager - rows = [] - stats = [] - num_sites = 0 - total_s = time.time() - - search_text, filters = self.parseSearch(search) - - for address, site in SiteManager.site_manager.list().iteritems(): - if not site.storage.has_db: - continue - - if "site" in filters: - if filters["site"].lower() not in [site.address, site.content_manager.contents["content.json"].get("title").lower()]: - continue - - if site.storage.db: # Database loaded - feeds = site.storage.db.schema.get("feeds") - else: - try: - feeds = site.storage.loadJson("dbschema.json").get("feeds") - except: - continue - - if not feeds: - continue - - num_sites += 1 - - for name, query in feeds.iteritems(): - s = time.time() - try: - db_query = DbQuery(query) - - params = [] - # Filters - if search_text: - db_query.wheres.append("(%s LIKE ? 
OR %s LIKE ?)" % (db_query.fields["body"], db_query.fields["title"])) - search_like = "%" + search_text.replace(" ", "%") + "%" - params.append(search_like) - params.append(search_like) - if filters.get("type") and filters["type"] not in query: - continue - - if day_limit: - db_query.wheres.append( - "%s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit) - ) - - # Order - db_query.parts["ORDER BY"] = "date_added DESC" - db_query.parts["LIMIT"] = str(limit) - - res = site.storage.query(str(db_query), params) - except Exception, err: - self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err))) - stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query}) - continue - for row in res: - row = dict(row) - if row["date_added"] > time.time() + 120: - continue # Feed item is in the future, skip it - row["site"] = address - row["feed_name"] = name - rows.append(row) - stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)}) - return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3), "stats": stats}) - - -@PluginManager.registerTo("User") -class UserPlugin(object): - # Set queries that user follows - def setFeedFollow(self, address, feeds): - site_data = self.getSiteData(address) - site_data["follow"] = feeds - self.save() - return site_data diff --git a/plugins/Newsfeed/__init__.py b/plugins/Newsfeed/__init__.py deleted file mode 100644 index 20cc04a1..00000000 --- a/plugins/Newsfeed/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import NewsfeedPlugin \ No newline at end of file diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py deleted file mode 100644 index 1a1f10af..00000000 --- a/plugins/OptionalManager/ContentDbPlugin.py +++ /dev/null @@ -1,422 +0,0 @@ -import time -import collections -import itertools -import re - -import gevent - -from util import helper -from Plugin import PluginManager -from Config import config -from Debug import Debug - -if "content_db" not in locals().keys(): # To keep between module reloads - content_db = None - - -@PluginManager.registerTo("ContentDb") -class ContentDbPlugin(object): - def __init__(self, *args, **kwargs): - global content_db - content_db = self - self.filled = {} # Site addresses that already filled from content.json - self.need_filling = False # file_optional table just created, fill data from content.json files - self.time_peer_numbers_updated = 0 - self.my_optional_files = {} # Last 50 site_address/inner_path called by fileWrite (auto-pinning these files) - self.optional_files = collections.defaultdict(dict) - self.optional_files_loading = False - helper.timer(60 * 5, self.checkOptionalLimit) - super(ContentDbPlugin, self).__init__(*args, **kwargs) - - def getSchema(self): - schema = super(ContentDbPlugin, self).getSchema() - - # Need file_optional table - schema["tables"]["file_optional"] = { - "cols": [ - ["file_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"], - ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"], - ["inner_path", "TEXT"], - ["hash_id", "INTEGER"], - ["size", "INTEGER"], - ["peer", "INTEGER DEFAULT 0"], - ["uploaded", "INTEGER DEFAULT 0"], - ["is_downloaded", "INTEGER DEFAULT 0"], - ["is_pinned", "INTEGER DEFAULT 0"], - ["time_added", "INTEGER DEFAULT 0"], - ["time_downloaded", "INTEGER DEFAULT 0"], - ["time_accessed", "INTEGER DEFAULT 0"] - ], - "indexes": [ - "CREATE 
UNIQUE INDEX file_optional_key ON file_optional (site_id, inner_path)", - "CREATE INDEX is_downloaded ON file_optional (is_downloaded)" - ], - "schema_changed": 11 - } - - return schema - - def initSite(self, site): - super(ContentDbPlugin, self).initSite(site) - if self.need_filling: - self.fillTableFileOptional(site) - if not self.optional_files_loading: - gevent.spawn_later(1, self.loadFilesOptional) - self.optional_files_loading = True - - def checkTables(self): - changed_tables = super(ContentDbPlugin, self).checkTables() - if "file_optional" in changed_tables: - self.need_filling = True - return changed_tables - - # Load optional files ending - def loadFilesOptional(self): - s = time.time() - num = 0 - total = 0 - total_downloaded = 0 - res = content_db.execute("SELECT site_id, inner_path, size, is_downloaded FROM file_optional") - site_sizes = collections.defaultdict(lambda: collections.defaultdict(int)) - for row in res: - self.optional_files[row["site_id"]][row["inner_path"][-8:]] = 1 - num += 1 - - # Update site size stats - site_sizes[row["site_id"]]["size_optional"] += row["size"] - if row["is_downloaded"]: - site_sizes[row["site_id"]]["optional_downloaded"] += row["size"] - - # Site site size stats to sites.json settings - site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()} - for site_id, stats in site_sizes.iteritems(): - site_address = site_ids_reverse.get(site_id) - if not site_address: - self.log.error("Not found site_id: %s" % site_id) - continue - site = self.sites[site_address] - site.settings["size_optional"] = stats["size_optional"] - site.settings["optional_downloaded"] = stats["optional_downloaded"] - total += stats["size_optional"] - total_downloaded += stats["optional_downloaded"] - - self.log.debug( - "Loaded %s optional files: %.2fMB, downloaded: %.2fMB in %.3fs" % - (num, float(total) / 1024 / 1024, float(total_downloaded) / 1024 / 1024, time.time() - s) - ) - - if self.need_filling and self.getOptionalLimitBytes() >= 0 and self.getOptionalLimitBytes() < total_downloaded: - limit_bytes = self.getOptionalLimitBytes() - limit_new = round((float(total_downloaded) / 1024 / 1024 / 1024) * 1.1, 2) # Current limit + 10% - self.log.debug( - "First startup after update and limit is smaller than downloaded files size (%.2fGB), increasing it from %.2fGB to %.2fGB" % - (float(total_downloaded) / 1024 / 1024 / 1024, float(limit_bytes) / 1024 / 1024 / 1024, limit_new) - ) - config.saveValue("optional_limit", limit_new) - config.optional_limit = str(limit_new) - - # Predicts if the file is optional - def isOptionalFile(self, site_id, inner_path): - return self.optional_files[site_id].get(inner_path[-8:]) - - # Fill file_optional table with optional files found in sites - def fillTableFileOptional(self, site): - s = time.time() - site_id = self.site_ids.get(site.address) - if not site_id: - return False - cur = self.getCursor() - cur.execute("BEGIN") - res = cur.execute("SELECT * FROM content WHERE size_files_optional > 0 AND site_id = %s" % site_id) - num = 0 - for row in res.fetchall(): - content = site.content_manager.contents[row["inner_path"]] - try: - num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur) - except Exception as err: - self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err)) - cur.execute("COMMIT") - cur.close() - - # Set my files to pinned - from User import UserManager - user = UserManager.user_manager.get() - if not user: - user = UserManager.user_manager.create() - auth_address = 
user.getAuthAddress(site.address) - self.execute( - "UPDATE file_optional SET is_pinned = 1 WHERE site_id = :site_id AND inner_path LIKE :inner_path", - {"site_id": site_id, "inner_path": "%%/%s/%%" % auth_address} - ) - - self.log.debug( - "Filled file_optional table for %s in %.3fs (loaded: %s, is_pinned: %s)" % - (site.address, time.time() - s, num, self.cur.cursor.rowcount) - ) - self.filled[site.address] = True - - def setContentFilesOptional(self, site, content_inner_path, content, cur=None): - if not cur: - cur = self - try: - cur.execute("BEGIN") - except Exception as err: - self.log.warning("Transaction begin error %s %s: %s" % (site, content_inner_path, Debug.formatException(err))) - - num = 0 - site_id = self.site_ids[site.address] - content_inner_dir = helper.getDirname(content_inner_path) - for relative_inner_path, file in content.get("files_optional", {}).iteritems(): - file_inner_path = content_inner_dir + relative_inner_path - hash_id = int(file["sha512"][0:4], 16) - if hash_id in site.content_manager.hashfield: - is_downloaded = 1 - else: - is_downloaded = 0 - if site.address + "/" + content_inner_dir in self.my_optional_files: - is_pinned = 1 - else: - is_pinned = 0 - cur.insertOrUpdate("file_optional", { - "hash_id": hash_id, - "size": int(file["size"]) - }, { - "site_id": site_id, - "inner_path": file_inner_path - }, oninsert={ - "time_added": int(time.time()), - "time_downloaded": int(time.time()) if is_downloaded else 0, - "is_downloaded": is_downloaded, - "peer": is_downloaded, - "is_pinned": is_pinned - }) - self.optional_files[site_id][file_inner_path[-8:]] = 1 - num += 1 - - if cur == self: - try: - cur.execute("END") - except Exception as err: - self.log.warning("Transaction end error %s %s: %s" % (site, content_inner_path, Debug.formatException(err))) - return num - - def setContent(self, site, inner_path, content, size=0): - super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size) - old_content = site.content_manager.contents.get(inner_path, {}) - if (not self.need_filling or self.filled.get(site.address)) and ("files_optional" in content or "files_optional" in old_content): - self.setContentFilesOptional(site, inner_path, content) - # Check deleted files - if old_content: - old_files = old_content.get("files_optional", {}).keys() - new_files = content.get("files_optional", {}).keys() - content_inner_dir = helper.getDirname(inner_path) - deleted = [content_inner_dir + key for key in old_files if key not in new_files] - if deleted: - site_id = self.site_ids[site.address] - self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": deleted}) - - def deleteContent(self, site, inner_path): - content = site.content_manager.contents.get(inner_path) - if content and "files_optional" in content: - site_id = self.site_ids[site.address] - content_inner_dir = helper.getDirname(inner_path) - optional_inner_paths = [ - content_inner_dir + relative_inner_path - for relative_inner_path in content.get("files_optional", {}).keys() - ] - self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": optional_inner_paths}) - super(ContentDbPlugin, self).deleteContent(site, inner_path) - - def updatePeerNumbers(self): - s = time.time() - num_file = 0 - num_updated = 0 - num_site = 0 - for site in self.sites.values(): - if not site.content_manager.has_optional_files: - continue - if not site.settings["serving"]: - continue - has_updated_hashfield = next(( - peer - for peer in site.peers.itervalues() - if 
peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated - ), None) - - if not has_updated_hashfield and site.content_manager.hashfield.time_changed < self.time_peer_numbers_updated: - continue - - hashfield_peers = itertools.chain.from_iterable( - peer.hashfield.storage - for peer in site.peers.itervalues() - if peer.has_hashfield - ) - peer_nums = collections.Counter( - itertools.chain( - hashfield_peers, - site.content_manager.hashfield - ) - ) - - site_id = self.site_ids[site.address] - if not site_id: - continue - - res = self.execute("SELECT file_id, hash_id, peer FROM file_optional WHERE ?", {"site_id": site_id}) - updates = {} - for row in res: - peer_num = peer_nums.get(row["hash_id"], 0) - if peer_num != row["peer"]: - updates[row["file_id"]] = peer_num - - self.execute("BEGIN") - for file_id, peer_num in updates.iteritems(): - self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id)) - self.execute("END") - - num_updated += len(updates) - num_file += len(peer_nums) - num_site += 1 - - self.time_peer_numbers_updated = time.time() - self.log.debug("%s/%s peer number for %s site updated in %.3fs" % (num_updated, num_file, num_site, time.time() - s)) - - def queryDeletableFiles(self): - # First return the files with atleast 10 seeder and not accessed in last week - query = """ - SELECT * FROM file_optional - WHERE peer > 10 AND %s - ORDER BY time_accessed < %s DESC, uploaded / size - """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7)) - limit_start = 0 - while 1: - num = 0 - res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) - for row in res: - yield row - num += 1 - if num < 50: - break - limit_start += 50 - - self.log.debug("queryDeletableFiles returning less-seeded files") - - # Then return files less seeder but still not accessed in last week - query = """ - SELECT * FROM file_optional - WHERE peer <= 10 AND %s - ORDER BY peer DESC, time_accessed < %s DESC, uploaded / size - """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7)) - limit_start = 0 - while 1: - num = 0 - res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) - for row in res: - yield row - num += 1 - if num < 50: - break - limit_start += 50 - - self.log.debug("queryDeletableFiles returning everyting") - - # At the end return all files - query = """ - SELECT * FROM file_optional - WHERE peer <= 10 AND %s - ORDER BY peer DESC, time_accessed, uploaded / size - """ % self.getOptionalUsedWhere() - limit_start = 0 - while 1: - num = 0 - res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) - for row in res: - yield row - num += 1 - if num < 50: - break - limit_start += 50 - - def getOptionalLimitBytes(self): - if config.optional_limit.endswith("%"): - limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit)) - limit_bytes = helper.getFreeSpace() * (limit_percent / 100) - else: - limit_bytes = float(re.sub("[^0-9.]", "", config.optional_limit)) * 1024 * 1024 * 1024 - return limit_bytes - - def getOptionalUsedWhere(self): - maxsize = config.optional_limit_exclude_minsize * 1024 * 1024 - query = "is_downloaded = 1 AND is_pinned = 0 AND size < %s" % maxsize - - # Don't delete optional files from owned sites - my_site_ids = [] - for address, site in self.sites.items(): - if site.settings["own"]: - my_site_ids.append(str(self.site_ids[address])) - - if my_site_ids: - query += " AND site_id NOT IN (%s)" % ", ".join(my_site_ids) - return query - - def getOptionalUsedBytes(self): - size = self.execute("SELECT 
SUM(size) FROM file_optional WHERE %s" % self.getOptionalUsedWhere()).fetchone()[0] - if not size: - size = 0 - return size - - def getOptionalNeedDelete(self, size): - if config.optional_limit.endswith("%"): - limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit)) - need_delete = size - ((helper.getFreeSpace() + size) * (limit_percent / 100)) - else: - need_delete = size - self.getOptionalLimitBytes() - return need_delete - - def checkOptionalLimit(self, limit=None): - if not limit: - limit = self.getOptionalLimitBytes() - - if limit < 0: - self.log.debug("Invalid limit for optional files: %s" % limit) - return False - - size = self.getOptionalUsedBytes() - - need_delete = self.getOptionalNeedDelete(size) - - self.log.debug( - "Optional size: %.1fMB/%.1fMB, Need delete: %.1fMB" % - (float(size) / 1024 / 1024, float(limit) / 1024 / 1024, float(need_delete) / 1024 / 1024) - ) - if need_delete <= 0: - return False - - self.updatePeerNumbers() - - site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()} - deleted_file_ids = [] - for row in self.queryDeletableFiles(): - site_address = site_ids_reverse.get(row["site_id"]) - site = self.sites.get(site_address) - if not site: - self.log.error("No site found for id: %s" % row["site_id"]) - continue - site.log.debug("Deleting %s %.3f MB left" % (row["inner_path"], float(need_delete) / 1024 / 1024)) - deleted_file_ids.append(row["file_id"]) - try: - site.content_manager.optionalRemoved(row["inner_path"], row["hash_id"], row["size"]) - site.storage.delete(row["inner_path"]) - need_delete -= row["size"] - except Exception as err: - site.log.error("Error deleting %s: %s" % (row["inner_path"], err)) - - if need_delete <= 0: - break - - cur = self.getCursor() - cur.execute("BEGIN") - for file_id in deleted_file_ids: - cur.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"file_id": file_id}) - cur.execute("COMMIT") - cur.close() diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py b/plugins/OptionalManager/OptionalManagerPlugin.py deleted file mode 100644 index 9da93041..00000000 --- a/plugins/OptionalManager/OptionalManagerPlugin.py +++ /dev/null @@ -1,229 +0,0 @@ -import time -import re -import collections - -import gevent - -from util import helper -from Plugin import PluginManager -import ContentDbPlugin - - -# We can only import plugin host clases after the plugins are loaded -@PluginManager.afterLoad -def importPluginnedClasses(): - global config - from Config import config - - -def processAccessLog(): - if access_log: - content_db = ContentDbPlugin.content_db - now = int(time.time()) - num = 0 - for site_id in access_log: - content_db.execute( - "UPDATE file_optional SET time_accessed = %s WHERE ?" % now, - {"site_id": site_id, "inner_path": access_log[site_id].keys()} - ) - num += len(access_log[site_id]) - access_log.clear() - - -def processRequestLog(): - if request_log: - content_db = ContentDbPlugin.content_db - cur = content_db.getCursor() - num = 0 - cur.execute("BEGIN") - for site_id in request_log: - for inner_path, uploaded in request_log[site_id].iteritems(): - content_db.execute( - "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" 
% uploaded, - {"site_id": site_id, "inner_path": inner_path} - ) - num += 1 - cur.execute("END") - request_log.clear() - - -if "access_log" not in locals().keys(): # To keep between module reloads - access_log = collections.defaultdict(dict) # {site_id: {inner_path1: 1, inner_path2: 1...}} - request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path1: 1, inner_path2: 1...}} - helper.timer(61, processAccessLog) - helper.timer(60, processRequestLog) - - -@PluginManager.registerTo("ContentManager") -class ContentManagerPlugin(object): - def __init__(self, *args, **kwargs): - self.cache_is_pinned = {} - super(ContentManagerPlugin, self).__init__(*args, **kwargs) - - def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): - if "|" in inner_path: # Big file piece - file_inner_path, file_range = inner_path.split("|") - else: - file_inner_path = inner_path - - self.contents.db.executeDelayed( - "UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 0", - {"now": int(time.time()), "site_id": self.contents.db.site_ids[self.site.address], "inner_path": file_inner_path} - ) - - return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own) - - def optionalRemoved(self, inner_path, hash_id, size=None): - self.contents.db.execute( - "UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 1", - {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} - ) - - if self.contents.db.cur.cursor.rowcount > 0: - back = super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size) - # Re-add to hashfield if we have other file with the same hash_id - if self.isDownloaded(hash_id=hash_id, force_check_db=True): - self.hashfield.appendHashId(hash_id) - return back - - def isDownloaded(self, inner_path=None, hash_id=None, force_check_db=False): - if hash_id and not force_check_db and hash_id not in self.hashfield: - return False - - if inner_path: - res = self.contents.db.execute( - "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1", - {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} - ) - else: - res = self.contents.db.execute( - "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND hash_id = :hash_id AND is_downloaded = 1 LIMIT 1", - {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id} - ) - row = res.fetchone() - if row and row[0]: - return True - else: - return False - - def isPinned(self, inner_path): - if inner_path in self.cache_is_pinned: - self.site.log.debug("Cached is pinned: %s" % inner_path) - return self.cache_is_pinned[inner_path] - - res = self.contents.db.execute( - "SELECT is_pinned FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1", - {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} - ) - row = res.fetchone() - - if row and row[0]: - is_pinned = True - else: - is_pinned = False - - self.cache_is_pinned[inner_path] = is_pinned - self.site.log.debug("Cache set is pinned: %s %s" % (inner_path, is_pinned)) - - return is_pinned - - def setPin(self, inner_path, is_pinned): - content_db = self.contents.db - site_id = content_db.site_ids[self.site.address] - content_db.execute("UPDATE 
file_optional SET is_pinned = %d WHERE ?" % is_pinned, {"site_id": site_id, "inner_path": inner_path}) - self.cache_is_pinned = {} - - def optionalDelete(self, inner_path): - if self.isPinned(inner_path): - self.site.log.debug("Skip deleting pinned optional file: %s" % inner_path) - return False - else: - return super(ContentManagerPlugin, self).optionalDelete(inner_path) - - -@PluginManager.registerTo("WorkerManager") -class WorkerManagerPlugin(object): - def doneTask(self, task): - super(WorkerManagerPlugin, self).doneTask(task) - - if task["optional_hash_id"] and not self.tasks: # Execute delayed queries immedietly after tasks finished - ContentDbPlugin.content_db.processDelayed() - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - def parsePath(self, path): - global access_log - path_parts = super(UiRequestPlugin, self).parsePath(path) - if path_parts: - site_id = ContentDbPlugin.content_db.site_ids.get(path_parts["request_address"]) - if site_id: - if ContentDbPlugin.content_db.isOptionalFile(site_id, path_parts["inner_path"]): - access_log[site_id][path_parts["inner_path"]] = 1 - return path_parts - - -@PluginManager.registerTo("FileRequest") -class FileRequestPlugin(object): - def actionGetFile(self, params): - stats = super(FileRequestPlugin, self).actionGetFile(params) - self.recordFileRequest(params["site"], params["inner_path"], stats) - return stats - - def actionStreamFile(self, params): - stats = super(FileRequestPlugin, self).actionStreamFile(params) - self.recordFileRequest(params["site"], params["inner_path"], stats) - return stats - - def recordFileRequest(self, site_address, inner_path, stats): - if not stats: - # Only track the last request of files - return False - site_id = ContentDbPlugin.content_db.site_ids[site_address] - if site_id and ContentDbPlugin.content_db.isOptionalFile(site_id, inner_path): - request_log[site_id][inner_path] += stats["bytes_sent"] - - -@PluginManager.registerTo("Site") -class SitePlugin(object): - def isDownloadable(self, inner_path): - is_downloadable = super(SitePlugin, self).isDownloadable(inner_path) - if is_downloadable: - return is_downloadable - - for path in self.settings.get("optional_help", {}).iterkeys(): - if inner_path.startswith(path): - return True - - return False - - def fileForgot(self, inner_path): - if "|" in inner_path and self.content_manager.isPinned(re.sub(r"\|.*", "", inner_path)): - self.log.debug("File %s is pinned, no fileForgot" % inner_path) - return False - else: - return super(SitePlugin, self).fileForgot(inner_path) - - def fileDone(self, inner_path): - if "|" in inner_path and self.bad_files.get(inner_path, 0) > 5: # Idle optional file done - inner_path_file = re.sub(r"\|.*", "", inner_path) - num_changed = 0 - for key, val in self.bad_files.items(): - if key.startswith(inner_path_file) and val > 1: - self.bad_files[key] = 1 - num_changed += 1 - self.log.debug("Idle optional file piece done, changed retry number of %s pieces." 
% num_changed) - if num_changed: - gevent.spawn(self.retryBadFiles) - - return super(SitePlugin, self).fileDone(inner_path) - - -@PluginManager.registerTo("ConfigPlugin") -class ConfigPlugin(object): - def createArguments(self): - group = self.parser.add_argument_group("OptionalManager plugin") - group.add_argument('--optional_limit', help='Limit total size of optional files', default="10%", metavar="GB or free space %") - group.add_argument('--optional_limit_exclude_minsize', help='Exclude files larger than this limit from optional size limit calculation', default=20, metavar="MB", type=int) - - return super(ConfigPlugin, self).createArguments() diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py deleted file mode 100644 index 00a5fcb7..00000000 --- a/plugins/OptionalManager/Test/TestOptionalManager.py +++ /dev/null @@ -1,148 +0,0 @@ -import hashlib -import os -import copy -import json -from cStringIO import StringIO - -import pytest - -from OptionalManager import OptionalManagerPlugin -from util import helper -from Crypt import CryptBitcoin - - -@pytest.mark.usefixtures("resetSettings") -class TestOptionalManager: - def testDbFill(self, site): - contents = site.content_manager.contents - assert len(site.content_manager.hashfield) > 0 - assert contents.db.execute("SELECT COUNT(*) FROM file_optional WHERE is_downloaded = 1").fetchone()[0] == len(site.content_manager.hashfield) - - def testSetContent(self, site): - contents = site.content_manager.contents - - # Add new file - new_content = copy.deepcopy(contents["content.json"]) - new_content["files_optional"]["testfile"] = { - "size": 1234, - "sha512": "aaaabbbbcccc" - } - num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] - contents["content.json"] = new_content - assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] > num_optional_files_before - - # Remove file - new_content = copy.deepcopy(contents["content.json"]) - del new_content["files_optional"]["testfile"] - num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] - contents["content.json"] = new_content - assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before - - def testDeleteContent(self, site): - contents = site.content_manager.contents - num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] - del contents["content.json"] - assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before - - def testVerifyFiles(self, site): - contents = site.content_manager.contents - - # Add new file - new_content = copy.deepcopy(contents["content.json"]) - new_content["files_optional"]["testfile"] = { - "size": 1234, - "sha512": "aaaabbbbcccc" - } - contents["content.json"] = new_content - file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() - assert not file_row["is_downloaded"] - - # Write file from outside of ZeroNet - site.storage.open("testfile", "wb").write("A" * 1234) # For quick check hash does not matter only file size - - hashfield_len_before = len(site.content_manager.hashfield) - site.storage.verifyFiles(quick_check=True) - assert len(site.content_manager.hashfield) == hashfield_len_before + 1 - - file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() - assert 
file_row["is_downloaded"] - - # Delete file outside of ZeroNet - site.storage.delete("testfile") - site.storage.verifyFiles(quick_check=True) - file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() - assert not file_row["is_downloaded"] - - def testVerifyFilesSameHashId(self, site): - contents = site.content_manager.contents - - new_content = copy.deepcopy(contents["content.json"]) - - # Add two files with same hashid (first 4 character) - new_content["files_optional"]["testfile1"] = { - "size": 1234, - "sha512": "aaaabbbbcccc" - } - new_content["files_optional"]["testfile2"] = { - "size": 2345, - "sha512": "aaaabbbbdddd" - } - contents["content.json"] = new_content - - assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd") - - # Write files from outside of ZeroNet (For quick check hash does not matter only file size) - site.storage.open("testfile1", "wb").write("A" * 1234) - site.storage.open("testfile2", "wb").write("B" * 2345) - - site.storage.verifyFiles(quick_check=True) - - # Make sure that both is downloaded - assert site.content_manager.isDownloaded("testfile1") - assert site.content_manager.isDownloaded("testfile2") - assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") in site.content_manager.hashfield - - # Delete one of the files - site.storage.delete("testfile1") - site.storage.verifyFiles(quick_check=True) - assert not site.content_manager.isDownloaded("testfile1") - assert site.content_manager.isDownloaded("testfile2") - assert site.content_manager.hashfield.getHashId("aaaabbbbdddd") in site.content_manager.hashfield - - def testIsPinned(self, site): - assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png") - site.content_manager.setPin("data/img/zerotalk-upvote.png", True) - assert site.content_manager.isPinned("data/img/zerotalk-upvote.png") - - assert len(site.content_manager.cache_is_pinned) == 1 - site.content_manager.cache_is_pinned = {} - assert site.content_manager.isPinned("data/img/zerotalk-upvote.png") - - def testBigfilePieceReset(self, site): - site.bad_files = { - "data/fake_bigfile.mp4|0-1024": 10, - "data/fake_bigfile.mp4|1024-2048": 10, - "data/fake_bigfile.mp4|2048-3064": 10 - } - site.onFileDone("data/fake_bigfile.mp4|0-1024") - assert site.bad_files["data/fake_bigfile.mp4|1024-2048"] == 1 - assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1 - - def testOptionalDelete(self, site): - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" - contents = site.content_manager.contents - - site.content_manager.setPin("data/img/zerotalk-upvote.png", True) - site.content_manager.setPin("data/img/zeroid.png", False) - new_content = copy.deepcopy(contents["content.json"]) - del new_content["files_optional"]["data/img/zerotalk-upvote.png"] - del new_content["files_optional"]["data/img/zeroid.png"] - - assert site.storage.isFile("data/img/zerotalk-upvote.png") - assert site.storage.isFile("data/img/zeroid.png") - - site.storage.writeJson("content.json", new_content) - site.content_manager.loadContent("content.json", force=True) - - assert not site.storage.isFile("data/img/zeroid.png") - assert site.storage.isFile("data/img/zerotalk-upvote.png") diff --git a/plugins/OptionalManager/Test/conftest.py b/plugins/OptionalManager/Test/conftest.py deleted file mode 100644 index 8c1df5b2..00000000 --- a/plugins/OptionalManager/Test/conftest.py +++ /dev/null @@ -1 +0,0 @@ -from src.Test.conftest import * \ No 
newline at end of file diff --git a/plugins/OptionalManager/Test/pytest.ini b/plugins/OptionalManager/Test/pytest.ini deleted file mode 100644 index d09210d1..00000000 --- a/plugins/OptionalManager/Test/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = Test*.py -addopts = -rsxX -v --durations=6 -markers = - webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py deleted file mode 100644 index 879fb0ad..00000000 --- a/plugins/OptionalManager/UiWebsocketPlugin.py +++ /dev/null @@ -1,383 +0,0 @@ -import re -import time -import cgi - -import gevent - -from Plugin import PluginManager -from Config import config -from util import helper -from Translate import Translate - -if "_" not in locals(): - _ = Translate("plugins/OptionalManager/languages/") - -bigfile_sha512_cache = {} - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def __init__(self, *args, **kwargs): - self.time_peer_numbers_updated = 0 - super(UiWebsocketPlugin, self).__init__(*args, **kwargs) - - def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs): - # Add file to content.db and set it as pinned - content_db = self.site.content_manager.contents.db - content_inner_dir = helper.getDirname(inner_path) - content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time() - if len(content_db.my_optional_files) > 50: # Keep only last 50 - oldest_key = min( - content_db.my_optional_files.iterkeys(), - key=(lambda key: content_db.my_optional_files[key]) - ) - del content_db.my_optional_files[oldest_key] - - return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs) - - def updatePeerNumbers(self): - self.site.updateHashfield() - content_db = self.site.content_manager.contents.db - content_db.updatePeerNumbers() - self.site.updateWebsocket(peernumber_updated=True) - - def addBigfileInfo(self, row): - global bigfile_sha512_cache - - content_db = self.site.content_manager.contents.db - site = content_db.sites[row["address"]] - if not site.settings.get("has_bigfile"): - return False - - file_key = row["address"] + "/" + row["inner_path"] - sha512 = bigfile_sha512_cache.get(file_key) - file_info = None - if not sha512: - file_info = site.content_manager.getFileInfo(row["inner_path"]) - if not file_info or not file_info.get("piece_size"): - return False - sha512 = file_info["sha512"] - bigfile_sha512_cache[file_key] = sha512 - - if sha512 in site.storage.piecefields: - piecefield = site.storage.piecefields[sha512].tostring() - else: - piecefield = None - - if piecefield: - row["pieces"] = len(piecefield) - row["pieces_downloaded"] = piecefield.count("1") - row["downloaded_percent"] = 100 * row["pieces_downloaded"] / row["pieces"] - if row["pieces_downloaded"]: - if not file_info: - file_info = site.content_manager.getFileInfo(row["inner_path"]) - row["bytes_downloaded"] = row["pieces_downloaded"] * file_info.get("piece_size", 0) - else: - row["bytes_downloaded"] = 0 - - row["is_downloading"] = bool(next((inner_path for inner_path in site.bad_files if inner_path.startswith(row["inner_path"])), False)) - - # Add leech / seed stats - row["peer_seed"] = 0 - row["peer_leech"] = 0 - for peer in site.peers.itervalues(): - if not peer.time_piecefields_updated or sha512 not in peer.piecefields: - continue - peer_piecefield = peer.piecefields[sha512].tostring() - if not peer_piecefield: - continue - if 
peer_piecefield == "1" * len(peer_piecefield): - row["peer_seed"] += 1 - else: - row["peer_leech"] += 1 - - # Add myself - if piecefield: - if row["pieces_downloaded"] == row["pieces"]: - row["peer_seed"] += 1 - else: - row["peer_leech"] += 1 - - return True - - # Optional file functions - - def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded"): - if not address: - address = self.site.address - - # Update peer numbers if necessary - content_db = self.site.content_manager.contents.db - if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5: - # Start in new thread to avoid blocking - self.time_peer_numbers_updated = time.time() - gevent.spawn(self.updatePeerNumbers) - - if address == "all" and "ADMIN" not in self.permissions: - return self.response(to, {"error": "Forbidden"}) - - if not self.hasSitePermission(address): - return self.response(to, {"error": "Forbidden"}) - - if not all([re.match("^[a-z_*/+-]+( DESC| ASC|)$", part.strip()) for part in orderby.split(",")]): - return self.response(to, "Invalid order_by") - - if type(limit) != int: - return self.response(to, "Invalid limit") - - back = [] - content_db = self.site.content_manager.contents.db - - wheres = {} - wheres_raw = [] - if "bigfile" in filter: - wheres["size >"] = 1024 * 1024 * 10 - - if "not_downloaded" in filter: - wheres["is_downloaded"] = 0 - elif "downloaded" in filter: - wheres_raw.append("(is_downloaded = 1 OR is_pinned = 1)") - - if "pinned" in filter: - wheres["is_pinned"] = 1 - - if address == "all": - join = "LEFT JOIN site USING (site_id)" - else: - wheres["site_id"] = content_db.site_ids[address] - join = "" - - if wheres_raw: - query_wheres_raw = "AND" + " AND ".join(wheres_raw) - else: - query_wheres_raw = "" - - query = "SELECT * FROM file_optional %s WHERE ? 
%s ORDER BY %s LIMIT %s" % (join, query_wheres_raw, orderby, limit) - - for row in content_db.execute(query, wheres): - row = dict(row) - if address != "all": - row["address"] = address - - if row["size"] > 1024 * 1024: - has_info = self.addBigfileInfo(row) - else: - has_info = False - - if not has_info: - if row["is_downloaded"]: - row["bytes_downloaded"] = row["size"] - row["downloaded_percent"] = 100 - else: - row["bytes_downloaded"] = 0 - row["downloaded_percent"] = 0 - - back.append(row) - self.response(to, back) - - def actionOptionalFileInfo(self, to, inner_path): - content_db = self.site.content_manager.contents.db - site_id = content_db.site_ids[self.site.address] - - # Update peer numbers if necessary - if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5: - # Start in new thread to avoid blocking - self.time_peer_numbers_updated = time.time() - gevent.spawn(self.updatePeerNumbers) - - query = "SELECT * FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1" - res = content_db.execute(query, {"site_id": site_id, "inner_path": inner_path}) - row = next(res, None) - if row: - row = dict(row) - if row["size"] > 1024 * 1024: - row["address"] = self.site.address - self.addBigfileInfo(row) - self.response(to, row) - else: - self.response(to, None) - - def setPin(self, inner_path, is_pinned, address=None): - if not address: - address = self.site.address - - if not self.hasSitePermission(address): - return {"error": "Forbidden"} - - site = self.server.sites[address] - site.content_manager.setPin(inner_path, is_pinned) - - return "ok" - - def actionOptionalFilePin(self, to, inner_path, address=None): - if type(inner_path) is not list: - inner_path = [inner_path] - back = self.setPin(inner_path, 1, address) - num_file = len(inner_path) - if back == "ok": - if num_file == 1: - self.cmd("notification", ["done", _["Pinned %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000]) - else: - self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000]) - self.response(to, back) - - def actionOptionalFileUnpin(self, to, inner_path, address=None): - if type(inner_path) is not list: - inner_path = [inner_path] - back = self.setPin(inner_path, 0, address) - num_file = len(inner_path) - if back == "ok": - if num_file == 1: - self.cmd("notification", ["done", _["Removed pin from %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000]) - else: - self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000]) - self.response(to, back) - - def actionOptionalFileDelete(self, to, inner_path, address=None): - if not address: - address = self.site.address - - if not self.hasSitePermission(address): - return self.response(to, {"error": "Forbidden"}) - - site = self.server.sites[address] - - content_db = site.content_manager.contents.db - site_id = content_db.site_ids[site.address] - - res = content_db.execute("SELECT * FROM file_optional WHERE ? 
LIMIT 1", {"site_id": site_id, "inner_path": inner_path, "is_downloaded": 1}) - row = next(res, None) - - if not row: - return self.response(to, {"error": "Not found in content.db"}) - - removed = site.content_manager.optionalRemoved(inner_path, row["hash_id"], row["size"]) - # if not removed: - # return self.response(to, {"error": "Not found in hash_id: %s" % row["hash_id"]}) - - content_db.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"site_id": site_id, "inner_path": inner_path}) - - try: - site.storage.delete(inner_path) - except Exception as err: - return self.response(to, {"error": "File delete error: %s" % err}) - site.updateWebsocket(file_delete=inner_path) - - if inner_path in site.content_manager.cache_is_pinned: - site.content_manager.cache_is_pinned = {} - - self.response(to, "ok") - - # Limit functions - - def actionOptionalLimitStats(self, to): - if "ADMIN" not in self.site.settings["permissions"]: - return self.response(to, "Forbidden") - - back = {} - back["limit"] = config.optional_limit - back["used"] = self.site.content_manager.contents.db.getOptionalUsedBytes() - back["free"] = helper.getFreeSpace() - - self.response(to, back) - - def actionOptionalLimitSet(self, to, limit): - if "ADMIN" not in self.site.settings["permissions"]: - return self.response(to, {"error": "Forbidden"}) - config.optional_limit = re.sub("\.0+$", "", limit) # Remove unnecessary digits from end - config.saveValue("optional_limit", limit) - self.response(to, "ok") - - # Distribute help functions - - def actionOptionalHelpList(self, to, address=None): - if not address: - address = self.site.address - - if not self.hasSitePermission(address): - return self.response(to, {"error": "Forbidden"}) - - site = self.server.sites[address] - - self.response(to, site.settings.get("optional_help", {})) - - def actionOptionalHelp(self, to, directory, title, address=None): - if not address: - address = self.site.address - - if not self.hasSitePermission(address): - return self.response(to, {"error": "Forbidden"}) - - site = self.server.sites[address] - content_db = site.content_manager.contents.db - site_id = content_db.site_ids[address] - - if "optional_help" not in site.settings: - site.settings["optional_help"] = {} - - stats = content_db.execute( - "SELECT COUNT(*) AS num, SUM(size) AS size FROM file_optional WHERE site_id = :site_id AND inner_path LIKE :inner_path", - {"site_id": site_id, "inner_path": directory + "%"} - ).fetchone() - stats = dict(stats) - - if not stats["size"]: - stats["size"] = 0 - if not stats["num"]: - stats["num"] = 0 - - self.cmd("notification", [ - "done", - _["You started to help distribute %s.
Directory: %s"] % - (cgi.escape(title), cgi.escape(directory)), - 10000 - ]) - - site.settings["optional_help"][directory] = title - - self.response(to, dict(stats)) - - def actionOptionalHelpRemove(self, to, directory, address=None): - if not address: - address = self.site.address - - if not self.hasSitePermission(address): - return self.response(to, {"error": "Forbidden"}) - - site = self.server.sites[address] - - try: - del site.settings["optional_help"][directory] - self.response(to, "ok") - except Exception: - self.response(to, {"error": "Not found"}) - - def cbOptionalHelpAll(self, to, site, value): - site.settings["autodownloadoptional"] = value - self.response(to, value) - - def actionOptionalHelpAll(self, to, value, address=None): - if not address: - address = self.site.address - - if not self.hasSitePermission(address): - return self.response(to, {"error": "Forbidden"}) - - site = self.server.sites[address] - - if value: - if "ADMIN" in self.site.settings["permissions"]: - self.cbOptionalHelpAll(to, site, True) - else: - site_title = site.content_manager.contents["content.json"].get("title", address) - self.cmd( - "confirm", - [ - _["Help distribute all new optional files on site %s"] % cgi.escape(site_title), - _["Yes, I want to help!"] - ], - lambda (res): self.cbOptionalHelpAll(to, site, True) - ) - else: - site.settings["autodownloadoptional"] = False - self.response(to, False) diff --git a/plugins/OptionalManager/__init__.py b/plugins/OptionalManager/__init__.py deleted file mode 100644 index 02969bba..00000000 --- a/plugins/OptionalManager/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import OptionalManagerPlugin \ No newline at end of file diff --git a/plugins/OptionalManager/languages/es.json b/plugins/OptionalManager/languages/es.json deleted file mode 100644 index 32ae46ae..00000000 --- a/plugins/OptionalManager/languages/es.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "Pinned %s files": "Archivos %s fijados", - "Removed pin from %s files": "Archivos %s que no estan fijados", - "You started to help distribute %s.
Directory: %s": "Tu empezaste a ayudar a distribuir %s.
Directorio: %s", - "Help distribute all new optional files on site %s": "Ayude a distribuir todos los archivos opcionales en el sitio %s", - "Yes, I want to help!": "¡Si, yo quiero ayudar!" -} diff --git a/plugins/OptionalManager/languages/fr.json b/plugins/OptionalManager/languages/fr.json deleted file mode 100644 index 47a563dc..00000000 --- a/plugins/OptionalManager/languages/fr.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "Pinned %s files": "Fichiers %s épinglés", - "Removed pin from %s files": "Fichiers %s ne sont plus épinglés", - "You started to help distribute %s.
Directory: %s": "Vous avez commencé à aider à distribuer %s.
Dossier : %s", - "Help distribute all new optional files on site %s": "Aider à distribuer tous les fichiers optionnels du site %s", - "Yes, I want to help!": "Oui, je veux aider !" -} diff --git a/plugins/OptionalManager/languages/hu.json b/plugins/OptionalManager/languages/hu.json deleted file mode 100644 index 7a23b86c..00000000 --- a/plugins/OptionalManager/languages/hu.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "Pinned %s files": "%s fájl rögzítve", - "Removed pin from %s files": "%s fájl rögzítés eltávolítva", - "You started to help distribute %s.
Directory: %s": "Új segítség a terjesztésben: %s.
Könyvtár: %s", - "Help distribute all new optional files on site %s": "Segítség az összes új opcionális fájl terjesztésében az %s oldalon", - "Yes, I want to help!": "Igen, segíteni akarok!" -} diff --git a/plugins/OptionalManager/languages/pt-br.json b/plugins/OptionalManager/languages/pt-br.json deleted file mode 100644 index 21d90cc0..00000000 --- a/plugins/OptionalManager/languages/pt-br.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "Pinned %s files": "Arquivos %s fixados", - "Removed pin from %s files": "Arquivos %s não estão fixados", - "You started to help distribute %s.
Directory: %s": "Você começou a ajudar a distribuir %s.
Pasta: %s", - "Help distribute all new optional files on site %s": "Ajude a distribuir todos os novos arquivos opcionais no site %s", - "Yes, I want to help!": "Sim, eu quero ajudar!" -} diff --git a/plugins/OptionalManager/languages/zh-tw.json b/plugins/OptionalManager/languages/zh-tw.json deleted file mode 100644 index dfa9eaf3..00000000 --- a/plugins/OptionalManager/languages/zh-tw.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "Pinned %s files": "已固定 %s 個檔", - "Removed pin from %s files": "已解除固定 %s 個檔", - "You started to help distribute %s.
Directory: %s": "你已經開始幫助分發 %s
目錄:%s", - "Help distribute all new optional files on site %s": "你想要幫助分發 %s 網站的所有檔嗎?", - "Yes, I want to help!": "是,我想要幫助!" -} diff --git a/plugins/OptionalManager/languages/zh.json b/plugins/OptionalManager/languages/zh.json deleted file mode 100644 index ae18118e..00000000 --- a/plugins/OptionalManager/languages/zh.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "Pinned %s files": "已固定 %s 个文件", - "Removed pin from %s files": "已解除固定 %s 个文件", - "You started to help distribute %s.
Directory: %s": "您已经开始帮助分发 %s
目录:%s", - "Help distribute all new optional files on site %s": "您想要帮助分发 %s 站点的所有文件吗?", - "Yes, I want to help!": "是,我想要帮助!" -} diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py deleted file mode 100644 index 241b5c58..00000000 --- a/plugins/PeerDb/PeerDbPlugin.py +++ /dev/null @@ -1,103 +0,0 @@ -import time -import sqlite3 -import random -import atexit - -import gevent -from Plugin import PluginManager - - -@PluginManager.registerTo("ContentDb") -class ContentDbPlugin(object): - def __init__(self, *args, **kwargs): - atexit.register(self.saveAllPeers) - super(ContentDbPlugin, self).__init__(*args, **kwargs) - - def getSchema(self): - schema = super(ContentDbPlugin, self).getSchema() - - schema["tables"]["peer"] = { - "cols": [ - ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"], - ["address", "TEXT NOT NULL"], - ["port", "INTEGER NOT NULL"], - ["hashfield", "BLOB"], - ["reputation", "INTEGER NOT NULL"], - ["time_added", "INTEGER NOT NULL"], - ["time_found", "INTEGER NOT NULL"] - ], - "indexes": [ - "CREATE UNIQUE INDEX peer_key ON peer (site_id, address, port)" - ], - "schema_changed": 2 - } - - return schema - - def loadPeers(self, site): - s = time.time() - site_id = self.site_ids.get(site.address) - res = self.execute("SELECT * FROM peer WHERE site_id = :site_id", {"site_id": site_id}) - num = 0 - num_hashfield = 0 - for row in res: - peer = site.addPeer(str(row["address"]), row["port"]) - if not peer: # Already exist - continue - if row["hashfield"]: - peer.hashfield.replaceFromString(row["hashfield"]) - num_hashfield += 1 - peer.time_added = row["time_added"] - peer.time_found = row["time_found"] - peer.reputation = row["reputation"] - if row["address"].endswith(".onion"): - peer.reputation = peer.reputation / 2 - 1 # Onion peers less likely working - num += 1 - if num_hashfield: - site.content_manager.has_optional_files = True - site.log.debug("%s peers (%s with hashfield) loaded in %.3fs" % (num, num_hashfield, time.time() - s)) - - def iteratePeers(self, site): - site_id = self.site_ids.get(site.address) - for key, peer in site.peers.iteritems(): - address, port = key.rsplit(":", 1) - if peer.has_hashfield: - hashfield = sqlite3.Binary(peer.hashfield.tostring()) - else: - hashfield = "" - yield (site_id, address, port, hashfield, peer.reputation, int(peer.time_added), int(peer.time_found)) - - def savePeers(self, site, spawn=False): - if spawn: - # Save peers every hour (+random some secs to not update very site at same time) - gevent.spawn_later(60 * 60 + random.randint(0, 60), self.savePeers, site, spawn=True) - if not site.peers: - site.log.debug("Peers not saved: No peers found") - return - s = time.time() - site_id = self.site_ids.get(site.address) - cur = self.getCursor() - cur.execute("BEGIN") - try: - cur.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id}) - cur.cursor.executemany( - "INSERT INTO peer (site_id, address, port, hashfield, reputation, time_added, time_found) VALUES (?, ?, ?, ?, ?, ?, ?)", - self.iteratePeers(site) - ) - except Exception as err: - site.log.error("Save peer error: %s" % err) - finally: - cur.execute("END") - site.log.debug("Peers saved in %.3fs" % (time.time() - s)) - - def initSite(self, site): - super(ContentDbPlugin, self).initSite(site) - gevent.spawn_later(0.5, self.loadPeers, site) - gevent.spawn_later(60*60, self.savePeers, site, spawn=True) - - def saveAllPeers(self): - for site in self.sites.values(): - try: - self.savePeers(site) - except Exception, err: - 
site.log.error("Save peer error: %s" % err) diff --git a/plugins/PeerDb/__init__.py b/plugins/PeerDb/__init__.py deleted file mode 100644 index 967561dc..00000000 --- a/plugins/PeerDb/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -import PeerDbPlugin - diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py deleted file mode 100644 index c56a2cb4..00000000 --- a/plugins/Sidebar/SidebarPlugin.py +++ /dev/null @@ -1,760 +0,0 @@ -import re -import os -import cgi -import sys -import math -import time -import json -try: - import cStringIO as StringIO -except: - import StringIO - -import gevent - -from Config import config -from Plugin import PluginManager -from Debug import Debug -from Translate import Translate -from util import helper -from ZipStream import ZipStream - -plugin_dir = "plugins/Sidebar" -media_dir = plugin_dir + "/media" -sys.path.append(plugin_dir) # To able to load geoip lib - -loc_cache = {} -if "_" not in locals(): - _ = Translate(plugin_dir + "/languages/") - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - # Inject our resources to end of original file streams - def actionUiMedia(self, path): - if path == "/uimedia/all.js" or path == "/uimedia/all.css": - # First yield the original file and header - body_generator = super(UiRequestPlugin, self).actionUiMedia(path) - for part in body_generator: - yield part - - # Append our media file to the end - ext = re.match(".*(js|css)$", path).group(1) - plugin_media_file = "%s/all.%s" % (media_dir, ext) - if config.debug: - # If debugging merge *.css to all.css and *.js to all.js - from Debug import DebugMedia - DebugMedia.merge(plugin_media_file) - if ext == "js": - yield _.translateData(open(plugin_media_file).read()) - else: - for part in self.actionFile(plugin_media_file, send_header=False): - yield part - elif path.startswith("/uimedia/globe/"): # Serve WebGL globe files - file_name = re.match(".*/(.*)", path).group(1) - plugin_media_file = "%s-globe/%s" % (media_dir, file_name) - if config.debug and path.endswith("all.js"): - # If debugging merge *.css to all.css and *.js to all.js - from Debug import DebugMedia - DebugMedia.merge(plugin_media_file) - for part in self.actionFile(plugin_media_file): - yield part - else: - for part in super(UiRequestPlugin, self).actionUiMedia(path): - yield part - - def actionZip(self): - address = self.get["address"] - site = self.server.site_manager.get(address) - if not site: - return self.error404("Site not found") - - title = site.content_manager.contents.get("content.json", {}).get("title", "").encode('ascii', 'ignore') - filename = "%s-backup-%s.zip" % (title, time.strftime("%Y-%m-%d_%H_%M")) - self.sendHeader(content_type="application/zip", extra_headers={'Content-Disposition': 'attachment; filename="%s"' % filename}) - - return self.streamZip(site.storage.getPath(".")) - - def streamZip(self, file_path): - zs = ZipStream(file_path) - while 1: - data = zs.read() - if not data: - break - yield data - - - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def sidebarRenderPeerStats(self, body, site): - connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]) - connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")]) - onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id]) - local = len([peer for peer in site.peers.values() if helper.isPrivateIp(peer.ip)]) - peers_total = len(site.peers) - - # Add myself - if 
site.settings["serving"]: - peers_total += 1 - if any(site.connection_server.port_opened.values()): - connectable += 1 - if site.connection_server.tor_manager.start_onions: - onion += 1 - - if peers_total: - percent_connected = float(connected) / peers_total - percent_connectable = float(connectable) / peers_total - percent_onion = float(onion) / peers_total - else: - percent_connectable = percent_connected = percent_onion = 0 - - if local: - local_html = _(u"
  • {_[Local]}:{local}
  • ") - else: - local_html = "" - - peer_ips = [peer.key for peer in site.getConnectablePeers(20, allow_private=False)] - peer_ips.sort(key=lambda peer_ip: ".onion:" in peer_ip) - copy_link = "http://127.0.0.1:43110/%s/?zeronet_peers=%s" % ( - site.content_manager.contents["content.json"].get("domain", site.address), - ",".join(peer_ips) - ) - - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: peer graph bars and legend with {_[Connected]}:{connected}, {_[Connectable]}:{connectable}, {_[Onion]}:{onion}, {local_html}, {_[Total]}:{peers_total}]
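A minimal Python 3 sketch of the ratio bookkeeping performed by the deleted sidebarRenderPeerStats above: it counts connected, connectable (non-zero port) and onion peers and divides each by the peer total. The Peer dataclass and the "ip:port" keyed dict below are illustrative assumptions, not the plugin's real objects.

from dataclasses import dataclass

@dataclass
class Peer:
    ip: str
    port: int
    connected: bool

def peer_stats(peers):
    # peers: dict mapping "ip:port" keys to Peer records (assumed layout)
    total = len(peers)
    connected = sum(1 for p in peers.values() if p.connected)
    connectable = sum(1 for key in peers if not key.endswith(":0"))  # port 0 means not reachable
    onion = sum(1 for key in peers if ".onion" in key)
    if not total:
        return {"connected": 0, "connectable": 0, "onion": 0}
    return {
        "connected": connected / total,
        "connectable": connectable / total,
        "onion": onion / total,
    }

# Example: one reachable clearnet peer, one unreachable peer
print(peer_stats({
    "1.2.3.4:15441": Peer("1.2.3.4", 15441, True),
    "5.6.7.8:0": Peer("5.6.7.8", 0, False),
}))  # {'connected': 0.5, 'connectable': 0.5, 'onion': 0.0}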
  • - """.replace("{local_html}", local_html))) - - def sidebarRenderTransferStats(self, body, site): - recv = float(site.settings.get("bytes_recv", 0)) / 1024 / 1024 - sent = float(site.settings.get("bytes_sent", 0)) / 1024 / 1024 - transfer_total = recv + sent - if transfer_total: - percent_recv = recv / transfer_total - percent_sent = sent / transfer_total - else: - percent_recv = 0.5 - percent_sent = 0.5 - - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: transfer graph bars and legend with {_[Received]}:{recv:.2f}MB, {_[Sent]}:{sent:.2f}MB]
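The deleted sidebarRenderTransferStats above converts the per-site byte counters to megabytes and falls back to an even split when nothing has been transferred, so the bar still renders. A small sketch, assuming settings is a plain dict standing in for site.settings:

def transfer_shares(settings):
    # Convert cumulative byte counters to MB
    recv_mb = settings.get("bytes_recv", 0) / 1024 / 1024
    sent_mb = settings.get("bytes_sent", 0) / 1024 / 1024
    total = recv_mb + sent_mb
    if total:
        return recv_mb, sent_mb, recv_mb / total, sent_mb / total
    return 0.0, 0.0, 0.5, 0.5  # nothing transferred yet: split the bar evenly

recv_mb, sent_mb, pct_recv, pct_sent = transfer_shares({"bytes_recv": 3 * 1024 ** 2, "bytes_sent": 1024 ** 2})
print("%.2fMB received (%.0f%%) / %.2fMB sent (%.0f%%)" % (recv_mb, pct_recv * 100, sent_mb, pct_sent * 100))
# -> 3.00MB received (75%) / 1.00MB sent (25%)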
  • - """)) - - def sidebarRenderFileStats(self, body, site): - body.append(_(u""" -
      - """)) - - extensions = ( - ("html", "yellow"), - ("css", "orange"), - ("js", "purple"), - ("Image", "green"), - ("json", "darkblue"), - ("User data", "blue"), - ("Other", "white"), - ("Total", "black") - ) - # Collect stats - size_filetypes = {} - size_total = 0 - contents = site.content_manager.listContents() # Without user files - for inner_path in contents: - content = site.content_manager.contents[inner_path] - if "files" not in content or content["files"] is None: - continue - for file_name, file_details in content["files"].items(): - size_total += file_details["size"] - ext = file_name.split(".")[-1] - size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"] - - # Get user file sizes - size_user_content = site.content_manager.contents.execute( - "SELECT SUM(size) + SUM(size_files) AS size FROM content WHERE ?", - {"not__inner_path": contents} - ).fetchone()["size"] - if not size_user_content: - size_user_content = 0 - size_filetypes["User data"] = size_user_content - size_total += size_user_content - - # The missing difference is content.json sizes - if "json" in size_filetypes: - size_filetypes["json"] += max(0, site.settings["size"] - size_total) - size_total = size_other = site.settings["size"] - - # Bar - for extension, color in extensions: - if extension == "Total": - continue - if extension == "Other": - size = max(0, size_other) - elif extension == "Image": - size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) - size_other -= size - else: - size = size_filetypes.get(extension, 0) - size_other -= size - if size_total == 0: - percent = 0 - else: - percent = 100 * (float(size) / size_total) - percent = math.floor(percent * 100) / 100 # Floor to 2 digits - body.append( - u"""
    • """ % - (percent, _[extension], color, _[extension]) - ) - - # Legend - body.append("
      ") - for extension, color in extensions: - if extension == "Other": - size = max(0, size_other) - elif extension == "Image": - size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) - elif extension == "Total": - size = size_total - else: - size = size_filetypes.get(extension, 0) - - if extension == "js": - title = "javascript" - else: - title = extension - - if size > 1024 * 1024 * 10: # Format as mB is more than 10mB - size_formatted = "%.0fMB" % (size / 1024 / 1024) - else: - size_formatted = "%.0fkB" % (size / 1024) - - body.append(u"
    • %s:%s
    • " % (color, _[title], size_formatted)) - - body.append("
  • ") - - def sidebarRenderSizeLimit(self, body, site): - free_space = helper.getFreeSpace() / 1024 / 1024 - size = float(site.settings["size"]) / 1024 / 1024 - size_limit = site.getSizeLimit() - percent_used = size / size_limit - - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: size limit input (MB) and {_[Set]} button]
  • - """)) - - def sidebarRenderOptionalFileStats(self, body, site): - size_total = float(site.settings["size_optional"]) - size_downloaded = float(site.settings["optional_downloaded"]) - - if not size_total: - return False - - percent_downloaded = size_downloaded / size_total - - size_formatted_total = size_total / 1024 / 1024 - size_formatted_downloaded = size_downloaded / 1024 / 1024 - - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: optional files graph and legend with {_[Downloaded]}:{size_formatted_downloaded:.2f}MB, {_[Total]}:{size_formatted_total:.2f}MB]
  • - """)) - - return True - - def sidebarRenderOptionalFileSettings(self, body, site): - if self.site.settings.get("autodownloadoptional"): - checked = "checked='checked'" - else: - checked = "" - - body.append(_(u""" -
    - """)) - - autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit)) - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: autodownload big-file size limit input (MB) and {_[Set]} button]
    - """)) - body.append("
  • ") - - def sidebarRenderBadFiles(self, body, site): - body.append(_(u""" -
      - """)) - - i = 0 - for bad_file, tries in site.bad_files.iteritems(): - i += 1 - body.append(_(u"""
    • {bad_filename}
    • """, { - "bad_file_path": bad_file, - "bad_filename": helper.getFilename(bad_file), - "tries": _.pluralize(tries, "{} try", "{} tries") - })) - if i > 30: - break - - if len(site.bad_files) > 30: - num_bad_files = len(site.bad_files) - 30 - body.append(_(u"""
    • {_[+ {num_bad_files} more]}
    • """, nested=True)) - - body.append(""" -
  • - """) - - def sidebarRenderDbOptions(self, body, site): - if site.storage.db: - inner_path = site.storage.getInnerPath(site.storage.db.db_path) - size = float(site.storage.getSize(inner_path)) / 1024 - feeds = len(site.storage.db.schema.get("feeds", {})) - else: - inner_path = _[u"No database found"] - size = 0.0 - feeds = 0 - - body.append(_(u""" -
  • - """, nested=True)) - - def sidebarRenderIdentity(self, body, site): - auth_address = self.user.getAuthAddress(self.site.address, create=False) - rules = self.site.content_manager.getRules("data/users/%s/content.json" % auth_address) - if rules and rules.get("max_size"): - quota = rules["max_size"] / 1024 - try: - content = site.content_manager.contents["data/users/%s/content.json" % auth_address] - used = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) - except: - used = 0 - used = used / 1024 - else: - quota = used = 0 - - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: identity row with {auth_address} and {_[Change]} link]
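A sketch of how the deleted sidebarRenderIdentity above estimates a user's quota usage: the byte size of the user's own content.json plus the file sizes it declares, reported in kB against the rule's max_size. The example_content dict is a made-up stand-in for the real user content.json.

import json

def user_quota_used_kb(user_content):
    manifest_size = len(json.dumps(user_content))  # the content.json itself
    files_size = sum(f["size"] for f in user_content.get("files", {}).values())
    return (manifest_size + files_size) / 1024

example_content = {
    "files": {"data.json": {"size": 2048}},  # made-up file entry
    "modified": 1234567890
}
print("%.1f kB used" % user_quota_used_kb(example_content))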
  • - """)) - - def sidebarRenderControls(self, body, site): - auth_address = self.user.getAuthAddress(self.site.address, create=False) - if self.site.settings["serving"]: - class_pause = "" - class_resume = "hidden" - else: - class_pause = "hidden" - class_resume = "" - - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: site control buttons {_[Update]}, {_[Pause]}, {_[Resume]}, {_[Delete]}]
  • - """)) - - donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True) - site_address = self.site.address - body.append(_(u""" -
    - {site_address} - """)) - if donate_key == False or donate_key == "": - pass - elif (type(donate_key) == str or type(donate_key) == unicode) and len(donate_key) > 0: - body.append(_(u""" -
    - {donate_key} - """)) - else: - body.append(_(u""" - {_[Donate]} - """)) - body.append(_(u""" -
  • - """)) - - def sidebarRenderOwnedCheckbox(self, body, site): - if self.site.settings["own"]: - checked = "checked='checked'" - else: - checked = "" - - body.append(_(u""" -

[sidebar HTML markup stripped during extraction: {_[This is my site]} owned checkbox]
    - """)) - - def sidebarRenderOwnSettings(self, body, site): - title = site.content_manager.contents.get("content.json", {}).get("title", "") - description = site.content_manager.contents.get("content.json", {}).get("description", "") - - body.append(_(u""" -
[sidebar HTML markup stripped during extraction: site settings form inputs and {_[Save site settings]} button]
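The plugin code removed in this diff is Python 2. For reference, a sketch of the Python 3 equivalents of the constructs it relies on (illustrative only):

# Python 2 spelling in the deleted plugins     Python 3 equivalent
# site.peers.iteritems()                       site.peers.items()
# except Exception, err:                       except Exception as err:
# lambda (res): ...                            lambda res: ...        (tuple parameters were removed)
# unicode / u"..."                             str / "..."            (all strings are unicode)
# import cStringIO as StringIO                 import io; io.BytesIO() for binary buffers
# cgi.escape(text)                             html.escape(text)      (cgi.escape was removed in 3.8)
import html
import io

try:
    buff = io.BytesIO()                        # replaces cStringIO.StringIO for binary data
    for key, value in {"a": 1}.items():        # .iteritems() no longer exists
        buff.write(b"x" * value)
    print(html.escape("<b>%d bytes buffered</b>" % buff.tell()))
except Exception as err:                       # "except Exception, err" is a syntax error in Python 3
    print("error: %s" % err)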
  • - """)) - - def sidebarRenderContents(self, body, site): - has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey")) - if has_privatekey: - tag_privatekey = _(u"{_[Private key saved.]} {_[Forgot]}") - else: - tag_privatekey = _(u"{_[Add saved private key]}") - - body.append(_(u""" -
  • - - """.replace("{tag_privatekey}", tag_privatekey))) - - # Choose content you want to sign - body.append(_(u""" - - """)) - - contents = ["content.json"] - contents += site.content_manager.contents.get("content.json", {}).get("includes", {}).keys() - body.append(_(u"
    {_[Choose]}: ")) - for content in contents: - body.append(_("{content} ")) - body.append("
    ") - body.append("
  • ") - - def actionSidebarGetHtmlTag(self, to): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to run this command") - - site = self.site - - body = [] - - body.append("
    ") - body.append("×") - body.append("

    %s

    " % cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True)) - - body.append("
    ") - - body.append("
      ") - - self.sidebarRenderPeerStats(body, site) - self.sidebarRenderTransferStats(body, site) - self.sidebarRenderFileStats(body, site) - self.sidebarRenderSizeLimit(body, site) - has_optional = self.sidebarRenderOptionalFileStats(body, site) - if has_optional: - self.sidebarRenderOptionalFileSettings(body, site) - self.sidebarRenderDbOptions(body, site) - self.sidebarRenderIdentity(body, site) - self.sidebarRenderControls(body, site) - if site.bad_files: - self.sidebarRenderBadFiles(body, site) - - self.sidebarRenderOwnedCheckbox(body, site) - body.append("
      ") - self.sidebarRenderOwnSettings(body, site) - self.sidebarRenderContents(body, site) - body.append("
      ") - body.append("
    ") - body.append("
    ") - - body.append("") - - self.response(to, "".join(body)) - - def downloadGeoLiteDb(self, db_path): - import urllib - import gzip - import shutil - from util import helper - - self.log.info("Downloading GeoLite2 City database...") - self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], 0]) - db_urls = [ - "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", - "https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz" - ] - for db_url in db_urls: - try: - # Download - response = helper.httpRequest(db_url) - data_size = response.getheader('content-length') - data_recv = 0 - data = StringIO.StringIO() - while True: - buff = response.read(1024 * 512) - if not buff: - break - data.write(buff) - data_recv += 1024 * 512 - if data_size: - progress = int(float(data_recv) / int(data_size) * 100) - self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], progress]) - self.log.info("GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell()) - data.seek(0) - - # Unpack - with gzip.GzipFile(fileobj=data) as gzip_file: - shutil.copyfileobj(gzip_file, open(db_path, "wb")) - - self.cmd("progress", ["geolite-info", _["GeoLite2 City database downloaded!"], 100]) - time.sleep(2) # Wait for notify animation - return True - except Exception as err: - self.log.error("Error downloading %s: %s" % (db_url, err)) - pass - self.cmd("progress", [ - "geolite-info", - _["GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}"].format(err, db_urls[0]), - -100 - ]) - - def getLoc(self, geodb, ip): - global loc_cache - - if ip in loc_cache: - return loc_cache[ip] - else: - try: - loc_data = geodb.get(ip) - except: - loc_data = None - - if not loc_data or "location" not in loc_data: - loc_cache[ip] = None - return None - - loc = { - "lat": loc_data["location"]["latitude"], - "lon": loc_data["location"]["longitude"], - } - if "city" in loc_data: - loc["city"] = loc_data["city"]["names"]["en"] - - if "country" in loc_data: - loc["country"] = loc_data["country"]["names"]["en"] - - loc_cache[ip] = loc - return loc - - def getPeerLocations(self, peers): - import maxminddb - db_path = config.data_dir + '/GeoLite2-City.mmdb' - if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: - if not self.downloadGeoLiteDb(db_path): - return False - geodb = maxminddb.open_database(db_path) - - peers = peers.values() - # Place bars - peer_locations = [] - placed = {} # Already placed bars here - for peer in peers: - # Height of bar - if peer.connection and peer.connection.last_ping_delay: - ping = round(peer.connection.last_ping_delay * 1000) - else: - ping = None - loc = self.getLoc(geodb, peer.ip) - - if not loc: - continue - # Create position array - lat, lon = loc["lat"], loc["lon"] - latlon = "%s,%s" % (lat, lon) - if latlon in placed and helper.getIpType(peer.ip) == "ipv4": # Dont place more than 1 bar to same place, fake repos using ip address last two part - lat += float(128 - int(peer.ip.split(".")[-2])) / 50 - lon += float(128 - int(peer.ip.split(".")[-1])) / 50 - latlon = "%s,%s" % (lat, lon) - placed[latlon] = True - peer_location = {} - peer_location.update(loc) - peer_location["lat"] = lat - peer_location["lon"] = lon - peer_location["ping"] = ping - - peer_locations.append(peer_location) - - # Append myself - for ip in self.site.connection_server.ip_external_list: - my_loc = self.getLoc(geodb, ip) - if my_loc: - my_loc["ping"] = 0 - peer_locations.append(my_loc) - - return peer_locations - - - def actionSidebarGetPeers(self, to): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to run this command") - try: - peer_locations = self.getPeerLocations(self.site.peers) - globe_data = [] - ping_times = [ - peer_location["ping"] - for peer_location in peer_locations - if peer_location["ping"] - ] - if ping_times: - ping_avg = sum(ping_times) / float(len(ping_times)) - else: - ping_avg = 0 - - for peer_location in peer_locations: - if peer_location["ping"] == 0: # Me - height = -0.135 - elif peer_location["ping"]: - height = min(0.20, math.log(1 + peer_location["ping"] / ping_avg, 300)) - else: - height = -0.03 - - globe_data += [peer_location["lat"], peer_location["lon"], height] - - self.response(to, globe_data) - except Exception, err: - self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err)) - self.response(to, {"error": err}) - - def actionSiteSetOwned(self, to, owned): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to run this command") - - if self.site.address == config.updatesite: - return self.response(to, "You can't change the ownership of the updater site") - - self.site.settings["own"] = bool(owned) - self.site.updateWebsocket(owned=owned) - - def actionUserSetSitePrivatekey(self, to, privatekey): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to 
run this command") - - site_data = self.user.sites[self.site.address] - site_data["privatekey"] = privatekey - self.site.updateWebsocket(set_privatekey=bool(privatekey)) - - return "ok" - - def actionSiteSetAutodownloadoptional(self, to, owned): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to run this command") - - self.site.settings["autodownloadoptional"] = bool(owned) - self.site.bad_files = {} - gevent.spawn(self.site.update, check_files=True) - self.site.worker_manager.removeSolvedFileTasks() - - def actionDbReload(self, to): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to run this command") - - self.site.storage.closeDb() - self.site.storage.getDb() - - return self.response(to, "ok") - - def actionDbRebuild(self, to): - permissions = self.getPermissions(to) - if "ADMIN" not in permissions: - return self.response(to, "You don't have permission to run this command") - - self.site.storage.rebuildDb() - - return self.response(to, "ok") diff --git a/plugins/Sidebar/ZipStream.py b/plugins/Sidebar/ZipStream.py deleted file mode 100644 index ea6283e4..00000000 --- a/plugins/Sidebar/ZipStream.py +++ /dev/null @@ -1,59 +0,0 @@ -import cStringIO as StringIO -import os -import zipfile - -class ZipStream(file): - def __init__(self, dir_path): - self.dir_path = dir_path - self.pos = 0 - self.buff_pos = 0 - self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) - self.buff = StringIO.StringIO() - self.file_list = self.getFileList() - - def getFileList(self): - for root, dirs, files in os.walk(self.dir_path): - for file in files: - file_path = root + "/" + file - relative_path = os.path.join(os.path.relpath(root, self.dir_path), file) - yield file_path, relative_path - self.zf.close() - - def read(self, size=60 * 1024): - for file_path, relative_path in self.file_list: - self.zf.write(file_path, relative_path) - if self.buff.tell() >= size: - break - self.buff.seek(0) - back = self.buff.read() - self.buff.truncate(0) - self.buff.seek(0) - self.buff_pos += len(back) - return back - - def write(self, data): - self.pos += len(data) - self.buff.write(data) - - def tell(self): - return self.pos - - def seek(self, pos, whence=0): - if pos >= self.buff_pos: - self.buff.seek(pos - self.buff_pos, whence) - self.pos = pos - - def flush(self): - pass - - -if __name__ == "__main__": - zs = ZipStream(".") - out = open("out.zip", "wb") - while 1: - data = zs.read() - print("Write %s" % len(data)) - if not data: - break - out.write(data) - out.close() diff --git a/plugins/Sidebar/__init__.py b/plugins/Sidebar/__init__.py deleted file mode 100644 index 8b61cb4a..00000000 --- a/plugins/Sidebar/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import SidebarPlugin \ No newline at end of file diff --git a/plugins/Sidebar/languages/da.json b/plugins/Sidebar/languages/da.json deleted file mode 100644 index a421292c..00000000 --- a/plugins/Sidebar/languages/da.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "Peers": "Klienter", - "Connected": "Forbundet", - "Connectable": "Mulige", - "Connectable peers": "Mulige klienter", - - "Data transfer": "Data overførsel", - "Received": "Modtaget", - "Received bytes": "Bytes modtaget", - "Sent": "Sendt", - "Sent bytes": "Bytes sendt", - - "Files": "Filer", - "Total": "I alt", - "Image": "Image", - "Other": "Andet", - "User data": "Bruger data", - - "Size limit": "Side max størrelse", - "limit used": "brugt", - "free 
space": "fri", - "Set": "Opdater", - - "Optional files": "Valgfri filer", - "Downloaded": "Downloadet", - "Download and help distribute all files": "Download og hjælp med at dele filer", - "Total size": "Størrelse i alt", - "Downloaded files": "Filer downloadet", - - "Database": "Database", - "search feeds": "søgninger", - "{feeds} query": "{feeds} søgninger", - "Reload": "Genindlæs", - "Rebuild": "Genopbyg", - "No database found": "Ingen database fundet", - - "Identity address": "Autorisations ID", - "Change": "Skift", - - "Update": "Opdater", - "Pause": "Pause", - "Resume": "Aktiv", - "Delete": "Slet", - "Are you sure?": "Er du sikker?", - - "Site address": "Side addresse", - "Donate": "Doner penge", - - "Missing files": "Manglende filer", - "{} try": "{} forsøg", - "{} tries": "{} forsøg", - "+ {num_bad_files} more": "+ {num_bad_files} mere", - - "This is my site": "Dette er min side", - "Site title": "Side navn", - "Site description": "Side beskrivelse", - "Save site settings": "Gem side opsætning", - - "Content publishing": "Indhold offentliggøres", - "Choose": "Vælg", - "Sign": "Signer", - "Publish": "Offentliggør", - - "This function is disabled on this proxy": "Denne funktion er slået fra på denne ZeroNet proxyEz a funkció ki van kapcsolva ezen a proxy-n", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 City database kunne ikke downloades: {}!
    Download venligst databasen manuelt og udpak i data folder:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", - "GeoLite2 City database downloaded!": "GeoLite2 City database downloadet!", - - "Are you sure?": "Er du sikker?", - "Site storage limit modified!": "Side max størrelse ændret!", - "Database schema reloaded!": "Database definition genindlæst!", - "Database rebuilding....": "Genopbygger database...", - "Database rebuilt!": "Database genopbygget!", - "Site updated!": "Side opdateret!", - "Delete this site": "Slet denne side", - "File write error: ": "Fejl ved skrivning af fil: ", - "Site settings saved!": "Side opsætning gemt!", - "Enter your private key:": "Indtast din private nøgle:", - " Signed!": " Signeret!", - "WebGL not supported": "WebGL er ikke supporteret" -} \ No newline at end of file diff --git a/plugins/Sidebar/languages/de.json b/plugins/Sidebar/languages/de.json deleted file mode 100644 index 2f5feacd..00000000 --- a/plugins/Sidebar/languages/de.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "Peers": "Peers", - "Connected": "Verbunden", - "Connectable": "Verbindbar", - "Connectable peers": "Verbindbare Peers", - - "Data transfer": "Datei Transfer", - "Received": "Empfangen", - "Received bytes": "Empfangene Bytes", - "Sent": "Gesendet", - "Sent bytes": "Gesendete Bytes", - - "Files": "Dateien", - "Total": "Gesamt", - "Image": "Bilder", - "Other": "Sonstiges", - "User data": "Nutzer Daten", - - "Size limit": "Speicher Limit", - "limit used": "Limit benutzt", - "free space": "freier Speicher", - "Set": "Setzten", - - "Optional files": "Optionale Dateien", - "Downloaded": "Heruntergeladen", - "Download and help distribute all files": "Herunterladen und helfen alle Dateien zu verteilen", - "Total size": "Gesamte Größe", - "Downloaded files": "Heruntergeladene Dateien", - - "Database": "Datenbank", - "search feeds": "Feeds durchsuchen", - "{feeds} query": "{feeds} Abfrage", - "Reload": "Neu laden", - "Rebuild": "Neu bauen", - "No database found": "Keine Datenbank gefunden", - - "Identity address": "Identitäts Adresse", - "Change": "Ändern", - - "Update": "Aktualisieren", - "Pause": "Pausieren", - "Resume": "Fortsetzen", - "Delete": "Löschen", - "Are you sure?": "Bist du sicher?", - - "Site address": "Seiten Adresse", - "Donate": "Spenden", - - "Missing files": "Fehlende Dateien", - "{} try": "{} versuch", - "{} tries": "{} versuche", - "+ {num_bad_files} more": "+ {num_bad_files} mehr", - - "This is my site": "Das ist meine Seite", - "Site title": "Seiten Titel", - "Site description": "Seiten Beschreibung", - "Save site settings": "Einstellungen der Seite speichern", - - "Content publishing": "Inhaltsveröffentlichung", - "Choose": "Wähle", - "Sign": "Signieren", - "Publish": "Veröffentlichen", - - "This function is disabled on this proxy": "Diese Funktion ist auf dieser Proxy deaktiviert", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 City Datenbank Download Fehler: {}!
    Bitte manuell herunterladen und die Datei in das Datei Verzeichnis extrahieren:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "Herunterladen der GeoLite2 City Datenbank (einmalig, ~20MB)...", - "GeoLite2 City database downloaded!": "GeoLite2 City Datenbank heruntergeladen!", - - "Are you sure?": "Bist du sicher?", - "Site storage limit modified!": "Speicher Limit der Seite modifiziert!", - "Database schema reloaded!": "Datebank Schema neu geladen!", - "Database rebuilding....": "Datenbank neu bauen...", - "Database rebuilt!": "Datenbank neu gebaut!", - "Site updated!": "Seite aktualisiert!", - "Delete this site": "Diese Seite löschen", - "File write error: ": "Datei schreib fehler:", - "Site settings saved!": "Seiten Einstellungen gespeichert!", - "Enter your private key:": "Gib deinen privaten Schlüssel ein:", - " Signed!": " Signiert!", - "WebGL not supported": "WebGL nicht unterstützt" -} diff --git a/plugins/Sidebar/languages/es.json b/plugins/Sidebar/languages/es.json deleted file mode 100644 index b9e98c46..00000000 --- a/plugins/Sidebar/languages/es.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "Peers": "Pares", - "Connected": "Conectados", - "Connectable": "Conectables", - "Connectable peers": "Pares conectables", - - "Data transfer": "Transferencia de datos", - "Received": "Recibidos", - "Received bytes": "Bytes recibidos", - "Sent": "Enviados", - "Sent bytes": "Bytes envidados", - - "Files": "Ficheros", - "Total": "Total", - "Image": "Imagen", - "Other": "Otro", - "User data": "Datos del usuario", - - "Size limit": "Límite de tamaño", - "limit used": "Límite utilizado", - "free space": "Espacio libre", - "Set": "Establecer", - - "Optional files": "Ficheros opcionales", - "Downloaded": "Descargado", - "Download and help distribute all files": "Descargar y ayudar a distribuir todos los ficheros", - "Total size": "Tamaño total", - "Downloaded files": "Ficheros descargados", - - "Database": "Base de datos", - "search feeds": "Fuentes de búsqueda", - "{feeds} query": "{feeds} consulta", - "Reload": "Recargar", - "Rebuild": "Reconstruir", - "No database found": "No se ha encontrado la base de datos", - - "Identity address": "Dirección de la identidad", - "Change": "Cambiar", - - "Update": "Actualizar", - "Pause": "Pausar", - "Resume": "Reanudar", - "Delete": "Borrar", - - "Site address": "Dirección del sitio", - "Donate": "Donar", - - "Missing files": "Ficheros perdidos", - "{} try": "{} intento", - "{} tries": "{} intentos", - "+ {num_bad_files} more": "+ {num_bad_files} más", - - "This is my site": "Este es mi sitio", - "Site title": "Título del sitio", - "Site description": "Descripción del sitio", - "Save site settings": "Guardar la configuración del sitio", - - "Content publishing": "Publicación del contenido", - "Choose": "Elegir", - "Sign": "Firmar", - "Publish": "Publicar", - "This function is disabled on this proxy": "Esta función está deshabilitada en este proxy", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "¡Error de la base de datos GeoLite2: {}!
    Por favor, descárgalo manualmente y descomprime al directorio de datos:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "Descargando la base de datos de GeoLite2 (una única vez, ~20MB)...", - "GeoLite2 City database downloaded!": "¡Base de datos de GeoLite2 descargada!", - - "Are you sure?": "¿Estás seguro?", - "Site storage limit modified!": "¡Límite de almacenamiento del sitio modificado!", - "Database schema reloaded!": "¡Esquema de la base de datos recargado!", - "Database rebuilding....": "Reconstruyendo la base de datos...", - "Database rebuilt!": "¡Base de datos reconstruida!", - "Site updated!": "¡Sitio actualizado!", - "Delete this site": "Borrar este sitio", - "File write error: ": "Error de escritura de fichero:", - "Site settings saved!": "¡Configuración del sitio guardada!", - "Enter your private key:": "Introduce tu clave privada:", - " Signed!": " ¡firmado!", - "WebGL not supported": "WebGL no está soportado" -} diff --git a/plugins/Sidebar/languages/fr.json b/plugins/Sidebar/languages/fr.json deleted file mode 100644 index 5c4b3ac7..00000000 --- a/plugins/Sidebar/languages/fr.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "Peers": "Pairs", - "Connected": "Connectés", - "Connectable": "Accessibles", - "Connectable peers": "Pairs accessibles", - - "Data transfer": "Données transférées", - "Received": "Reçues", - "Received bytes": "Bytes reçus", - "Sent": "Envoyées", - "Sent bytes": "Bytes envoyés", - - "Files": "Fichiers", - "Total": "Total", - "Image": "Image", - "Other": "Autre", - "User data": "Utilisateurs", - - "Size limit": "Taille maximale", - "limit used": "utlisé", - "free space": "libre", - "Set": "Modifier", - - "Optional files": "Fichiers optionnels", - "Downloaded": "Téléchargé", - "Download and help distribute all files": "Télécharger et distribuer tous les fichiers", - "Total size": "Taille totale", - "Downloaded files": "Fichiers téléchargés", - - "Database": "Base de données", - "search feeds": "recherche", - "{feeds} query": "{feeds} requête", - "Reload": "Recharger", - "Rebuild": "Reconstruire", - "No database found": "Aucune base de données trouvée", - - "Identity address": "Adresse d'identité", - "Change": "Modifier", - - "Site control": "Opérations", - "Update": "Mettre à jour", - "Pause": "Suspendre", - "Resume": "Reprendre", - "Delete": "Supprimer", - "Are you sure?": "Êtes-vous certain?", - - "Site address": "Adresse du site", - "Donate": "Faire un don", - - "Missing files": "Fichiers manquants", - "{} try": "{} essai", - "{} tries": "{} essais", - "+ {num_bad_files} more": "+ {num_bad_files} manquants", - - "This is my site": "Ce site m'appartient", - "Site title": "Nom du site", - "Site description": "Description du site", - "Save site settings": "Enregistrer les paramètres", - - "Content publishing": "Publication du contenu", - "Choose": "Sélectionner", - "Sign": "Signer", - "Publish": "Publier", - - "This function is disabled on this proxy": "Cette fonction est désactivé sur ce proxy", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Erreur au téléchargement de la base de données GeoLite2: {}!
    Téléchargez et décompressez dans le dossier data:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "Téléchargement de la base de données GeoLite2 (une seule fois, ~20MB)...", - "GeoLite2 City database downloaded!": "Base de données GeoLite2 téléchargée!", - - "Are you sure?": "Êtes-vous certain?", - "Site storage limit modified!": "Taille maximale modifiée!", - "Database schema reloaded!": "Base de données rechargée!", - "Database rebuilding....": "Reconstruction de la base de données...", - "Database rebuilt!": "Base de données reconstruite!", - "Site updated!": "Site mis à jour!", - "Delete this site": "Supprimer ce site", - "File write error: ": "Erreur à l'écriture du fichier: ", - "Site settings saved!": "Paramètres du site enregistrés!", - "Enter your private key:": "Entrez votre clé privée:", - " Signed!": " Signé!", - "WebGL not supported": "WebGL n'est pas supporté" -} diff --git a/plugins/Sidebar/languages/hu.json b/plugins/Sidebar/languages/hu.json deleted file mode 100644 index 40ed8fab..00000000 --- a/plugins/Sidebar/languages/hu.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "Peers": "Csatlakozási pontok", - "Connected": "Csaltakozva", - "Connectable": "Csatlakozható", - "Connectable peers": "Csatlakozható peer-ek", - - "Data transfer": "Adatátvitel", - "Received": "Fogadott", - "Received bytes": "Fogadott byte-ok", - "Sent": "Küldött", - "Sent bytes": "Küldött byte-ok", - - "Files": "Fájlok", - "Total": "Összesen", - "Image": "Kép", - "Other": "Egyéb", - "User data": "Felh. adat", - - "Size limit": "Méret korlát", - "limit used": "felhasznált", - "free space": "szabad hely", - "Set": "Beállít", - - "Optional files": "Opcionális fájlok", - "Downloaded": "Letöltött", - "Download and help distribute all files": "Minden opcionális fájl letöltése", - "Total size": "Teljes méret", - "Downloaded files": "Letöltve", - - "Database": "Adatbázis", - "search feeds": "Keresés források", - "{feeds} query": "{feeds} lekérdezés", - "Reload": "Újratöltés", - "Rebuild": "Újraépítés", - "No database found": "Adatbázis nem található", - - "Identity address": "Azonosító cím", - "Change": "Módosít", - - "Site control": "Oldal műveletek", - "Update": "Frissít", - "Pause": "Szünteltet", - "Resume": "Folytat", - "Delete": "Töröl", - "Are you sure?": "Biztos vagy benne?", - - "Site address": "Oldal címe", - "Donate": "Támogatás", - - "Missing files": "Hiányzó fájlok", - "{} try": "{} próbálkozás", - "{} tries": "{} próbálkozás", - "+ {num_bad_files} more": "+ még {num_bad_files} darab", - - "This is my site": "Ez az én oldalam", - "Site title": "Oldal neve", - "Site description": "Oldal leírása", - "Save site settings": "Oldal beállítások mentése", - - "Content publishing": "Tartalom publikálás", - "Choose": "Válassz", - "Sign": "Aláírás", - "Publish": "Publikálás", - - "This function is disabled on this proxy": "Ez a funkció ki van kapcsolva ezen a proxy-n", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 város adatbázis letöltési hiba: {}!
    A térképhez töltsd le és csomagold ki a data könyvtárba:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", - "GeoLite2 City database downloaded!": "GeoLite2 város adatbázis letöltve!", - - "Are you sure?": "Biztos vagy benne?", - "Site storage limit modified!": "Az oldalt méret korlát módosítva!", - "Database schema reloaded!": "Adatbázis séma újratöltve!", - "Database rebuilding....": "Adatbázis újraépítés...", - "Database rebuilt!": "Adatbázis újraépítve!", - "Site updated!": "Az oldal frissítve!", - "Delete this site": "Az oldal törlése", - "File write error: ": "Fájl írási hiba: ", - "Site settings saved!": "Az oldal beállításai elmentve!", - "Enter your private key:": "Add meg a prviát kulcsod:", - " Signed!": " Aláírva!", - "WebGL not supported": "WebGL nem támogatott" -} \ No newline at end of file diff --git a/plugins/Sidebar/languages/it.json b/plugins/Sidebar/languages/it.json deleted file mode 100644 index 6aa0969a..00000000 --- a/plugins/Sidebar/languages/it.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "Peers": "Peer", - "Connected": "Connessi", - "Connectable": "Collegabili", - "Connectable peers": "Peer collegabili", - - "Data transfer": "Trasferimento dati", - "Received": "Ricevuti", - "Received bytes": "Byte ricevuti", - "Sent": "Inviati", - "Sent bytes": "Byte inviati", - - "Files": "File", - "Total": "Totale", - "Image": "Imagine", - "Other": "Altro", - "User data": "Dati utente", - - "Size limit": "Limite dimensione", - "limit used": "limite usato", - "free space": "spazio libero", - "Set": "Imposta", - - "Optional files": "File facoltativi", - "Downloaded": "Scaricati", - "Download and help distribute all files": "Scarica e aiuta a distribuire tutti i file", - "Total size": "Dimensione totale", - "Downloaded files": "File scaricati", - - "Database": "Database", - "search feeds": "ricerca di feed", - "{feeds} query": "{feeds} interrogazione", - "Reload": "Ricaricare", - "Rebuild": "Ricostruire", - "No database found": "Nessun database trovato", - - "Identity address": "Indirizzo di identità", - "Change": "Cambia", - - "Update": "Aggiorna", - "Pause": "Sospendi", - "Resume": "Riprendi", - "Delete": "Cancella", - "Are you sure?": "Sei sicuro?", - - "Site address": "Indirizzo sito", - "Donate": "Dona", - - "Missing files": "File mancanti", - "{} try": "{} tenta", - "{} tries": "{} prova", - "+ {num_bad_files} more": "+ {num_bad_files} altri", - - "This is my site": "Questo è il mio sito", - "Site title": "Titolo sito", - "Site description": "Descrizione sito", - "Save site settings": "Salva impostazioni sito", - - "Content publishing": "Pubblicazione contenuto", - "Choose": "Scegli", - "Sign": "Firma", - "Publish": "Pubblica", - - "This function is disabled on this proxy": "Questa funzione è disabilitata su questo proxy", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Errore scaricamento database GeoLite2 City: {}!
    Si prega di scaricarlo manualmente e spacchetarlo nella cartella dir:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "Scaricamento database GeoLite2 City (solo una volta, ~20MB)...", - "GeoLite2 City database downloaded!": "Database GeoLite2 City scaricato!", - - "Are you sure?": "Sei sicuro?", - "Site storage limit modified!": "Limite di archiviazione del sito modificato!", - "Database schema reloaded!": "Schema database ricaricato!", - "Database rebuilding....": "Ricostruzione database...", - "Database rebuilt!": "Database ricostruito!", - "Site updated!": "Sito aggiornato!", - "Delete this site": "Cancella questo sito", - "File write error: ": "Errore scrittura file:", - "Site settings saved!": "Impostazioni sito salvate!", - "Enter your private key:": "Inserisci la tua chiave privata:", - " Signed!": " Firmato!", - "WebGL not supported": "WebGL non supportato" -} diff --git a/plugins/Sidebar/languages/jp.json b/plugins/Sidebar/languages/jp.json deleted file mode 100644 index 99b34564..00000000 --- a/plugins/Sidebar/languages/jp.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "Peers": "ピア", - "Connected": "接続済み", - "Connectable": "利用可能", - "Connectable peers": "ピアに接続可能", - - "Data transfer": "データ転送", - "Received": "受信", - "Received bytes": "受信バイト数", - "Sent": "送信", - "Sent bytes": "送信バイト数", - - "Files": "ファイル", - "Total": "合計", - "Image": "画像", - "Other": "その他", - "User data": "ユーザーデータ", - - "Size limit": "サイズ制限", - "limit used": "使用上限", - "free space": "フリースペース", - "Set": "セット", - - "Optional files": "オプション ファイル", - "Downloaded": "ダウンロード済み", - "Download and help distribute all files": "ダウンロードしてすべてのファイルの配布を支援する", - "Total size": "合計サイズ", - "Downloaded files": "ダウンロードされたファイル", - - "Database": "データベース", - "search feeds": "フィードを検索する", - "{feeds} query": "{フィード} お問い合わせ", - "Reload": "再読込", - "Rebuild": "再ビルド", - "No database found": "データベースが見つかりません", - - "Identity address": "Identity address", - "Change": "編集", - - "Site control": "サイト管理", - "Update": "更新", - "Pause": "一時停止", - "Resume": "再開", - "Delete": "削除", - "Are you sure?": "本当によろしいですか?", - - "Site address": "サイトアドレス", - "Donate": "寄付する", - - "Missing files": "ファイルがありません", - "{} try": "{} 試す", - "{} tries": "{} 試行", - "+ {num_bad_files} more": "+ {num_bad_files} more", - - "This is my site": "This is my site", - "Site title": "サイトタイトル", - "Site description": "サイトの説明", - "Save site settings": "サイトの設定を保存する", - - "Content publishing": "コンテンツを公開する", - "Choose": "選択", - "Sign": "Sign", - "Publish": "公開する", - - "This function is disabled on this proxy": "この機能はこのプロキシで無効になっています", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Cityデータベースのダウンロードエラー: {}!
    手動でダウンロードして、フォルダに解凍してください。:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Cityデータベースの読み込み (これは一度だけ行われます, ~20MB)...", - "GeoLite2 City database downloaded!": "GeoLite2 Cityデータベースがダウンロードされました!", - - "Are you sure?": "本当によろしいですか?", - "Site storage limit modified!": "サイトの保存容量の制限が変更されました!", - "Database schema reloaded!": "データベーススキーマがリロードされました!", - "Database rebuilding....": "データベースの再構築中....", - "Database rebuilt!": "データベースが再構築されました!", - "Site updated!": "サイトが更新されました!", - "Delete this site": "このサイトを削除する", - "File write error: ": "ファイル書き込みエラー:", - "Site settings saved!": "サイト設定が保存されました!", - "Enter your private key:": "秘密鍵を入力してください:", - " Signed!": " Signed!", - "WebGL not supported": "WebGLはサポートされていません" -} diff --git a/plugins/Sidebar/languages/pl.json b/plugins/Sidebar/languages/pl.json deleted file mode 100644 index 93268507..00000000 --- a/plugins/Sidebar/languages/pl.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "Peers": "Użytkownicy równorzędni", - "Connected": "Połączony", - "Connectable": "Możliwy do podłączenia", - "Connectable peers": "Połączeni użytkownicy równorzędni", - - "Data transfer": "Transfer danych", - "Received": "Odebrane", - "Received bytes": "Odebrany bajty", - "Sent": "Wysłane", - "Sent bytes": "Wysłane bajty", - - "Files": "Pliki", - "Total": "Sumarycznie", - "Image": "Obraz", - "Other": "Inne", - "User data": "Dane użytkownika", - - "Size limit": "Rozmiar limitu", - "limit used": "zużyty limit", - "free space": "wolna przestrzeń", - "Set": "Ustaw", - - "Optional files": "Pliki opcjonalne", - "Downloaded": "Ściągnięte", - "Download and help distribute all files": "Ściągnij i pomóż rozpowszechniać wszystkie pliki", - "Total size": "Rozmiar sumaryczny", - "Downloaded files": "Ściągnięte pliki", - - "Database": "Baza danych", - "search feeds": "przeszukaj zasoby", - "{feeds} query": "{feeds} pytanie", - "Reload": "Odśwież", - "Rebuild": "Odbuduj", - "No database found": "Nie odnaleziono bazy danych", - - "Identity address": "Adres identyfikacyjny", - "Change": "Zmień", - - "Site control": "Kontrola strony", - "Update": "Zaktualizuj", - "Pause": "Wstrzymaj", - "Resume": "Wznów", - "Delete": "Skasuj", - "Are you sure?": "Jesteś pewien?", - - "Site address": "Adres strony", - "Donate": "Wspomóż", - - "Missing files": "Brakujące pliki", - "{} try": "{} próba", - "{} tries": "{} próby", - "+ {num_bad_files} more": "+ {num_bad_files} więcej", - - "This is my site": "To moja strona", - "Site title": "Tytuł strony", - "Site description": "Opis strony", - "Save site settings": "Zapisz ustawienia strony", - - "Content publishing": "Publikowanie treści", - "Choose": "Wybierz", - "Sign": "Podpisz", - "Publish": "Opublikuj", - - "This function is disabled on this proxy": "Ta funkcja jest zablokowana w tym proxy", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Błąd ściągania bazy danych GeoLite2 City: {}!
    Proszę ściągnąć ją recznie i wypakować do katalogu danych:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "Ściąganie bazy danych GeoLite2 City (tylko jednorazowo, ok. 20MB)...", - "GeoLite2 City database downloaded!": "Baza danych GeoLite2 City ściagnięta!", - - "Are you sure?": "Jesteś pewien?", - "Site storage limit modified!": "Limit pamięci strony zmodyfikowany!", - "Database schema reloaded!": "Schemat bazy danych załadowany ponownie!", - "Database rebuilding....": "Przebudowywanie bazy danych...", - "Database rebuilt!": "Baza danych przebudowana!", - "Site updated!": "Strona zaktualizowana!", - "Delete this site": "Usuń tę stronę", - "File write error: ": "Błąd zapisu pliku: ", - "Site settings saved!": "Ustawienia strony zapisane!", - "Enter your private key:": "Wpisz swój prywatny klucz:", - " Signed!": " Podpisane!", - "WebGL not supported": "WebGL nie jest obsługiwany" -} diff --git a/plugins/Sidebar/languages/pt-br.json b/plugins/Sidebar/languages/pt-br.json deleted file mode 100644 index 44fe06a2..00000000 --- a/plugins/Sidebar/languages/pt-br.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "Copy to clipboard": "Copiar para área de transferência (clipboard)", - "Peers": "Peers", - "Connected": "Ligados", - "Connectable": "Disponíveis", - "Onion": "Onion", - "Local": "Locais", - "Connectable peers": "Peers disponíveis", - - "Data transfer": "Transferência de dados", - "Received": "Recebidos", - "Received bytes": "Bytes recebidos", - "Sent": "Enviados", - "Sent bytes": "Bytes enviados", - - "Files": "Arquivos", - "Save as .zip": "Salvar como .zip", - "Total": "Total", - "Image": "Imagem", - "Other": "Outros", - "User data": "Dados do usuário", - - "Size limit": "Limite de tamanho", - "limit used": "limite utilizado", - "free space": "espaço livre", - "Set": "Definir", - - "Optional files": "Arquivos opcionais", - "Downloaded": "Baixados", - "Download and help distribute all files": "Baixar e ajudar a distribuir todos os arquivos", - "Total size": "Tamanho total", - "Downloaded files": "Arquivos baixados", - - "Database": "Banco de dados", - "search feeds": "pesquisar feeds", - "{feeds} query": "consulta de {feeds}", - "Reload": "Recarregar", - "Rebuild": "Reconstruir", - "No database found": "Base de dados não encontrada", - - "Identity address": "Endereço de identidade", - "Change": "Alterar", - - "Site control": "Controle do site", - "Update": "Atualizar", - "Pause": "Suspender", - "Resume": "Continuar", - "Delete": "Remover", - "Are you sure?": "Tem certeza?", - - "Site address": "Endereço do site", - "Donate": "Doar", - - "Needs to be updated": "Necessitam ser atualizados", - "{} try": "{} tentativa", - "{} tries": "{} tentativas", - "+ {num_bad_files} more": "+ {num_bad_files} adicionais", - - "This is my site": "Este é o meu site", - "Site title": "Título do site", - "Site description": "Descrição do site", - "Save site settings": "Salvar definições do site", - "Open site directory": "Abrir diretório do site", - - "Content publishing": "内容发布", - "Content publishing": "Publicação do conteúdo", - "Choose": "Escolher", - "Sign": "Assinar", - "Publish": "Publicar", - "Sign and publish": "Assinar e publicar", - "add saved private key": "adicionar privatekey (chave privada) para salvar", - "Private key saved for site signing": "Privatekey foi salva para assinar o site", - "Private key saved.": "Privatekey salva.", - "forgot": "esquecer", - "Saved private key removed": "Privatekey salva foi removida", - "This function is disabled on this proxy": "Esta função encontra-se desativada neste proxy", - "GeoLite2 City 
database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Erro ao baixar a base de dados GeoLite2 City: {}!
    Por favor baixe manualmente e descompacte os dados para a seguinte pasta:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "Baixando a base de dados GeoLite2 City (uma única vez, ~20MB)...", - "GeoLite2 City database downloaded!": "A base de dados GeoLite2 City foi baixada!", - - "Are you sure?": "Tem certeza?", - "Site storage limit modified!": "O limite de armazenamento do site foi modificado!", - "Database schema reloaded!": "O esquema da base de dados foi atualizado!", - "Database rebuilding....": "Reconstruindo base de dados...", - "Database rebuilt!": "Base de dados reconstruída!", - "Site updated!": "Site atualizado!", - "Delete this site": "Remover este site", - "Blacklist": "Blacklist", - "Blacklist this site": "Blacklistar este site", - "Reason": "Motivo", - "Delete and Blacklist": "Deletar e blacklistar", - "File write error: ": "Erro de escrita de arquivo: ", - "Site settings saved!": "Definições do site salvas!", - "Enter your private key:": "Digite sua chave privada:", - " Signed!": " Assinado!", - "WebGL not supported": "WebGL não é suportado" -} diff --git a/plugins/Sidebar/languages/ru.json b/plugins/Sidebar/languages/ru.json deleted file mode 100644 index f2eeca04..00000000 --- a/plugins/Sidebar/languages/ru.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "Peers": "Пиры", - "Connected": "Подключенные", - "Connectable": "Доступные", - "Connectable peers": "Пиры доступны для подключения", - - "Data transfer": "Передача данных", - "Received": "Получено", - "Received bytes": "Получено байн", - "Sent": "Отправлено", - "Sent bytes": "Отправлено байт", - - "Files": "Файлы", - "Total": "Всего", - "Image": "Изображений", - "Other": "Другое", - "User data": "Ваш контент", - - "Size limit": "Ограничение по размеру", - "limit used": "Использовано", - "free space": "Доступно", - "Set": "Установить", - - "Optional files": "Опциональные файлы", - "Downloaded": "Загружено", - "Download and help distribute all files": "Загрузить опциональные файлы для помощи сайту", - "Total size": "Объём", - "Downloaded files": "Загруженные файлы", - - "Database": "База данных", - "search feeds": "поиск подписок", - "{feeds} query": "{feeds} запрос", - "Reload": "Перезагрузить", - "Rebuild": "Перестроить", - "No database found": "База данных не найдена", - - "Identity address": "Уникальный адрес", - "Change": "Изменить", - - "Site control": "Управление сайтом", - "Update": "Обновить", - "Pause": "Пауза", - "Resume": "Продолжить", - "Delete": "Удалить", - "Are you sure?": "Вы уверены?", - - "Site address": "Адрес сайта", - "Donate": "Пожертвовать", - - "Missing files": "Отсутствующие файлы", - "{} try": "{} попробовать", - "{} tries": "{} попыток", - "+ {num_bad_files} more": "+ {num_bad_files} ещё", - - "This is my site": "Это мой сайт", - "Site title": "Название сайта", - "Site description": "Описание сайта", - "Save site settings": "Сохранить настройки сайта", - - "Content publishing": "Публикация контента", - "Choose": "Выбрать", - "Sign": "Подписать", - "Publish": "Опубликовать", - - "This function is disabled on this proxy": "Эта функция отключена на этом прокси", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Ошибка загрузки базы городов GeoLite2: {}!
    Пожалуйста, загрузите её вручную и распакуйте в папку:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "Загрузка базы городов GeoLite2 (это делается только 1 раз, ~20MB)...", - "GeoLite2 City database downloaded!": "База GeoLite2 успешно загружена!", - - "Are you sure?": "Вы уверены?", - "Site storage limit modified!": "Лимит хранилища для сайта изменен!", - "Database schema reloaded!": "Схема базы данных перезагружена!", - "Database rebuilding....": "Перестройка базы данных...", - "Database rebuilt!": "База данных перестроена!", - "Site updated!": "Сайт обновлён!", - "Delete this site": "Удалить этот сайт", - "File write error: ": "Ошибка записи файла:", - "Site settings saved!": "Настройки сайта сохранены!", - "Enter your private key:": "Введите свой приватный ключ:", - " Signed!": " Подписано!", - "WebGL not supported": "WebGL не поддерживается" -} diff --git a/plugins/Sidebar/languages/tr.json b/plugins/Sidebar/languages/tr.json deleted file mode 100644 index 88fcd6e0..00000000 --- a/plugins/Sidebar/languages/tr.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "Peers": "Eşler", - "Connected": "Bağlı", - "Connectable": "Erişilebilir", - "Connectable peers": "Bağlanılabilir eşler", - - "Data transfer": "Veri aktarımı", - "Received": "Alınan", - "Received bytes": "Bayt alındı", - "Sent": "Gönderilen", - "Sent bytes": "Bayt gönderildi", - - "Files": "Dosyalar", - "Total": "Toplam", - "Image": "Resim", - "Other": "Diğer", - "User data": "Kullanıcı verisi", - - "Size limit": "Boyut sınırı", - "limit used": "kullanılan", - "free space": "boş", - "Set": "Ayarla", - - "Optional files": "İsteğe bağlı dosyalar", - "Downloaded": "İndirilen", - "Download and help distribute all files": "Tüm dosyaları indir ve yayılmalarına yardım et", - "Total size": "Toplam boyut", - "Downloaded files": "İndirilen dosyalar", - - "Database": "Veritabanı", - "search feeds": "kaynak ara", - "{feeds} query": "{feeds} sorgu", - "Reload": "Yenile", - "Rebuild": "Yapılandır", - "No database found": "Veritabanı yok", - - "Identity address": "Kimlik adresi", - "Change": "Değiştir", - - "Site control": "Site kontrolü", - "Update": "Güncelle", - "Pause": "Duraklat", - "Resume": "Sürdür", - "Delete": "Sil", - "Are you sure?": "Emin misin?", - - "Site address": "Site adresi", - "Donate": "Bağış yap", - - "Missing files": "Eksik dosyalar", - "{} try": "{} deneme", - "{} tries": "{} deneme", - "+ {num_bad_files} more": "+ {num_bad_files} tane daha", - - "This is my site": "Bu benim sitem", - "Site title": "Site başlığı", - "Site description": "Site açıklaması", - "Save site settings": "Site ayarlarını kaydet", - - "Content publishing": "İçerik yayımlanıyor", - "Choose": "Seç", - "Sign": "İmzala", - "Publish": "Yayımla", - - "This function is disabled on this proxy": "Bu özellik bu vekilde kullanılamaz", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Şehir veritabanı indirme hatası: {}!
    Lütfen kendiniz indirip aşağıdaki konuma açınınız:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Şehir veritabanı indiriliyor (sadece bir kere, ~20MB)...", - "GeoLite2 City database downloaded!": "GeoLite2 Şehir veritabanı indirildi!", - - "Are you sure?": "Emin misiniz?", - "Site storage limit modified!": "Site saklama sınırı değiştirildi!", - "Database schema reloaded!": "Veritabanı şeması yeniden yüklendi!", - "Database rebuilding....": "Veritabanı yeniden inşa ediliyor...", - "Database rebuilt!": "Veritabanı yeniden inşa edildi!", - "Site updated!": "Site güncellendi!", - "Delete this site": "Bu siteyi sil", - "File write error: ": "Dosya yazma hatası: ", - "Site settings saved!": "Site ayarları kaydedildi!", - "Enter your private key:": "Özel anahtarınızı giriniz:", - " Signed!": " İmzala!", - "WebGL not supported": "WebGL desteklenmiyor" -} diff --git a/plugins/Sidebar/languages/zh-tw.json b/plugins/Sidebar/languages/zh-tw.json deleted file mode 100644 index 9d4ea1be..00000000 --- a/plugins/Sidebar/languages/zh-tw.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "Peers": "節點數", - "Connected": "已連線", - "Connectable": "可連線", - "Connectable peers": "可連線節點", - - "Data transfer": "數據傳輸", - "Received": "已接收", - "Received bytes": "已接收位元組", - "Sent": "已傳送", - "Sent bytes": "已傳送位元組", - - "Files": "檔案", - "Total": "共計", - "Image": "圖片", - "Other": "其他", - "User data": "使用者數據", - - "Size limit": "大小限制", - "limit used": "已使用", - "free space": "可用空間", - "Set": "偏好設定", - - "Optional files": "可選檔案", - "Downloaded": "已下載", - "Download and help distribute all files": "下載並幫助分發所有檔案", - "Total size": "總大小", - "Downloaded files": "下載的檔案", - - "Database": "資料庫", - "search feeds": "搜尋供稿", - "{feeds} query": "{feeds} 查詢 ", - "Reload": "重新整理", - "Rebuild": "重建", - "No database found": "未找到資料庫", - - "Identity address": "身分位址", - "Change": "變更", - - "Site control": "網站控制", - "Update": "更新", - "Pause": "暫停", - "Resume": "恢復", - "Delete": "刪除", - "Are you sure?": "你確定?", - - "Site address": "網站位址", - "Donate": "捐贈", - - "Missing files": "缺少的檔案", - "{} try": "{} 嘗試", - "{} tries": "{} 已嘗試", - "+ {num_bad_files} more": "+ {num_bad_files} 更多", - - "This is my site": "這是我的網站", - "Site title": "網站標題", - "Site description": "網站描述", - "Save site settings": "存儲網站設定", - "Open site directory": "打開所在資料夾", - - "Content publishing": "內容發布", - "Choose": "選擇", - "Sign": "簽署", - "Publish": "發布", - "Sign and publish": "簽名並發布", - "This function is disabled on this proxy": "此代理上禁用此功能", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 地理位置資料庫下載錯誤:{}!
    請手動下載並解壓到數據目錄:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下載 GeoLite2 地理位置資料庫 (僅一次,約 20MB )...", - "GeoLite2 City database downloaded!": "GeoLite2 地理位置資料庫已下載!", - - "Are you sure?": "你確定?", - "Site storage limit modified!": "網站存儲限制已變更!", - "Database schema reloaded!": "資料庫架構重新加載!", - "Database rebuilding....": "資料庫重建中...", - "Database rebuilt!": "資料庫已重建!", - "Site updated!": "網站已更新!", - "Delete this site": "刪除此網站", - "File write error: ": "檔案寫入錯誤:", - "Site settings saved!": "網站設置已保存!", - "Enter your private key:": "輸入您的私鑰:", - " Signed!": " 已簽署!", - "WebGL not supported": "不支援 WebGL" -} diff --git a/plugins/Sidebar/languages/zh.json b/plugins/Sidebar/languages/zh.json deleted file mode 100644 index 696084cf..00000000 --- a/plugins/Sidebar/languages/zh.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "Copy to clipboard": "复制到剪切板", - "Peers": "节点数", - "Connected": "已连接", - "Connectable": "可连接", - "Onion": "洋葱点", - "Local": "局域网", - "Connectable peers": "可连接节点", - - "Data transfer": "数据传输", - "Received": "已接收", - "Received bytes": "已接收字节", - "Sent": "已发送", - "Sent bytes": "已发送字节", - - "Files": "文件", - "Save as .zip": "打包成zip文件", - "Total": "总计", - "Image": "图像", - "Other": "其他", - "User data": "用户数据", - - "Size limit": "大小限制", - "limit used": "限额", - "free space": "剩余空间", - "Set": "设置", - - "Optional files": "可选文件", - "Downloaded": "已下载", - "Download and help distribute all files": "下载并帮助分发所有文件", - "Auto download big file size limit": "自动下载大文件大小限制", - "Total size": "总大小", - "Downloaded files": "已下载文件", - - "Database": "数据库", - "search feeds": "搜索数据源", - "{feeds} query": "{feeds} 请求", - "Reload": "重载", - "Rebuild": "重建", - "No database found": "没有找到数据库", - - "Identity address": "身份地址", - "Change": "更改", - - "Site control": "站点控制", - "Update": "更新", - "Pause": "暂停", - "Resume": "恢复", - "Delete": "删除", - "Are you sure?": "您确定吗?", - - "Site address": "站点地址", - "Donate": "捐赠", - - "Needs to be updated": "需要更新", - "{} try": "{} 尝试", - "{} tries": "{} 已尝试", - "+ {num_bad_files} more": "+ {num_bad_files} 更多", - - "This is my site": "这是我的站点", - "Site title": "站点标题", - "Site description": "站点描述", - "Save site settings": "保存站点设置", - "Open site directory": "打开所在文件夹", - - "Content publishing": "内容发布", - "Add saved private key": "添加并保存私钥", - "Save": "保存", - "Private key saved.": "私钥已保存", - "Private key saved for site signing": "已保存用于站点签名的私钥", - "Forgot": "删除私钥", - "Saved private key removed": "保存的私钥已删除", - "Choose": "选择", - "Sign": "签名", - "Publish": "发布", - "Sign and publish": "签名并发布", - "This function is disabled on this proxy": "此功能在代理上被禁用", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 地理位置数据库下载错误:{}!
    请手动下载并解压在数据目录:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下载 GeoLite2 地理位置数据库 (仅需一次,约 20MB )...", - "GeoLite2 City database downloaded!": "GeoLite2 地理位置数据库已下载!", - - "Are you sure?": "您确定吗?", - "Site storage limit modified!": "站点存储限制已更改!", - "Database schema reloaded!": "数据库模式已重新加载!", - "Database rebuilding....": "数据库重建中...", - "Database rebuilt!": "数据库已重建!", - "Site updated!": "站点已更新!", - "Delete this site": "删除此站点", - "Blacklist": "黑名单", - "Blacklist this site": "拉黑此站点", - "Reason": "原因", - "Delete and Blacklist": "删除并拉黑", - "File write error: ": "文件写入错误:", - "Site settings saved!": "站点设置已保存!", - "Enter your private key:": "输入您的私钥:", - " Signed!": " 已签名!", - "WebGL not supported": "不支持 WebGL" -} diff --git a/plugins/Sidebar/maxminddb/__init__.py b/plugins/Sidebar/maxminddb/__init__.py deleted file mode 100644 index fc28186b..00000000 --- a/plugins/Sidebar/maxminddb/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# pylint:disable=C0111 -import os - -import maxminddb.reader - -try: - import maxminddb.extension -except ImportError: - maxminddb.extension = None - -from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE, - MODE_MEMORY) -from maxminddb.decoder import InvalidDatabaseError - - -def open_database(database, mode=MODE_AUTO): - """Open a Maxmind DB database - - Arguments: - database -- A path to a valid MaxMind DB file such as a GeoIP2 - database file. - mode -- mode to open the database with. Valid mode are: - * MODE_MMAP_EXT - use the C extension with memory map. - * MODE_MMAP - read from memory map. Pure Python. - * MODE_FILE - read database as standard file. Pure Python. - * MODE_MEMORY - load database into memory. Pure Python. - * MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that - order. Default mode. - """ - if (mode == MODE_AUTO and maxminddb.extension and - hasattr(maxminddb.extension, 'Reader')) or mode == MODE_MMAP_EXT: - return maxminddb.extension.Reader(database) - elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY): - return maxminddb.reader.Reader(database, mode) - raise ValueError('Unsupported open mode: {0}'.format(mode)) - - -def Reader(database): # pylint: disable=invalid-name - """This exists for backwards compatibility. Use open_database instead""" - return open_database(database) - -__title__ = 'maxminddb' -__version__ = '1.2.0' -__author__ = 'Gregory Oschwald' -__license__ = 'Apache License, Version 2.0' -__copyright__ = 'Copyright 2014 Maxmind, Inc.' 
diff --git a/plugins/Sidebar/maxminddb/compat.py b/plugins/Sidebar/maxminddb/compat.py deleted file mode 100644 index 14c98832..00000000 --- a/plugins/Sidebar/maxminddb/compat.py +++ /dev/null @@ -1,28 +0,0 @@ -import sys - -# pylint: skip-file - -if sys.version_info[0] == 2: - import ipaddr as ipaddress # pylint:disable=F0401 - ipaddress.ip_address = ipaddress.IPAddress - - int_from_byte = ord - - FileNotFoundError = IOError - - def int_from_bytes(b): - if b: - return int(b.encode("hex"), 16) - return 0 - - byte_from_int = chr -else: - import ipaddress # pylint:disable=F0401 - - int_from_byte = lambda x: x - - FileNotFoundError = FileNotFoundError - - int_from_bytes = lambda x: int.from_bytes(x, 'big') - - byte_from_int = lambda x: bytes([x]) diff --git a/plugins/Sidebar/maxminddb/const.py b/plugins/Sidebar/maxminddb/const.py deleted file mode 100644 index 59ea84b6..00000000 --- a/plugins/Sidebar/maxminddb/const.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Constants used in the API""" - -MODE_AUTO = 0 -MODE_MMAP_EXT = 1 -MODE_MMAP = 2 -MODE_FILE = 4 -MODE_MEMORY = 8 diff --git a/plugins/Sidebar/maxminddb/decoder.py b/plugins/Sidebar/maxminddb/decoder.py deleted file mode 100644 index 1b8f0711..00000000 --- a/plugins/Sidebar/maxminddb/decoder.py +++ /dev/null @@ -1,173 +0,0 @@ -""" -maxminddb.decoder -~~~~~~~~~~~~~~~~~ - -This package contains code for decoding the MaxMind DB data section. - -""" -from __future__ import unicode_literals - -import struct - -from maxminddb.compat import byte_from_int, int_from_bytes -from maxminddb.errors import InvalidDatabaseError - - -class Decoder(object): # pylint: disable=too-few-public-methods - - """Decoder for the data section of the MaxMind DB""" - - def __init__(self, database_buffer, pointer_base=0, pointer_test=False): - """Created a Decoder for a MaxMind DB - - Arguments: - database_buffer -- an mmap'd MaxMind DB file. - pointer_base -- the base number to use when decoding a pointer - pointer_test -- used for internal unit testing of pointer code - """ - self._pointer_test = pointer_test - self._buffer = database_buffer - self._pointer_base = pointer_base - - def _decode_array(self, size, offset): - array = [] - for _ in range(size): - (value, offset) = self.decode(offset) - array.append(value) - return array, offset - - def _decode_boolean(self, size, offset): - return size != 0, offset - - def _decode_bytes(self, size, offset): - new_offset = offset + size - return self._buffer[offset:new_offset], new_offset - - # pylint: disable=no-self-argument - # |-> I am open to better ways of doing this as long as it doesn't involve - # lots of code duplication. 
- def _decode_packed_type(type_code, type_size, pad=False): - # pylint: disable=protected-access, missing-docstring - def unpack_type(self, size, offset): - if not pad: - self._verify_size(size, type_size) - new_offset = offset + type_size - packed_bytes = self._buffer[offset:new_offset] - if pad: - packed_bytes = packed_bytes.rjust(type_size, b'\x00') - (value,) = struct.unpack(type_code, packed_bytes) - return value, new_offset - return unpack_type - - def _decode_map(self, size, offset): - container = {} - for _ in range(size): - (key, offset) = self.decode(offset) - (value, offset) = self.decode(offset) - container[key] = value - return container, offset - - _pointer_value_offset = { - 1: 0, - 2: 2048, - 3: 526336, - 4: 0, - } - - def _decode_pointer(self, size, offset): - pointer_size = ((size >> 3) & 0x3) + 1 - new_offset = offset + pointer_size - pointer_bytes = self._buffer[offset:new_offset] - packed = pointer_bytes if pointer_size == 4 else struct.pack( - b'!c', byte_from_int(size & 0x7)) + pointer_bytes - unpacked = int_from_bytes(packed) - pointer = unpacked + self._pointer_base + \ - self._pointer_value_offset[pointer_size] - if self._pointer_test: - return pointer, new_offset - (value, _) = self.decode(pointer) - return value, new_offset - - def _decode_uint(self, size, offset): - new_offset = offset + size - uint_bytes = self._buffer[offset:new_offset] - return int_from_bytes(uint_bytes), new_offset - - def _decode_utf8_string(self, size, offset): - new_offset = offset + size - return self._buffer[offset:new_offset].decode('utf-8'), new_offset - - _type_decoder = { - 1: _decode_pointer, - 2: _decode_utf8_string, - 3: _decode_packed_type(b'!d', 8), # double, - 4: _decode_bytes, - 5: _decode_uint, # uint16 - 6: _decode_uint, # uint32 - 7: _decode_map, - 8: _decode_packed_type(b'!i', 4, pad=True), # int32 - 9: _decode_uint, # uint64 - 10: _decode_uint, # uint128 - 11: _decode_array, - 14: _decode_boolean, - 15: _decode_packed_type(b'!f', 4), # float, - } - - def decode(self, offset): - """Decode a section of the data section starting at offset - - Arguments: - offset -- the location of the data structure to decode - """ - new_offset = offset + 1 - (ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset]) - type_num = ctrl_byte >> 5 - # Extended type - if not type_num: - (type_num, new_offset) = self._read_extended(new_offset) - - if not type_num in self._type_decoder: - raise InvalidDatabaseError('Unexpected type number ({type}) ' - 'encountered'.format(type=type_num)) - - (size, new_offset) = self._size_from_ctrl_byte( - ctrl_byte, new_offset, type_num) - return self._type_decoder[type_num](self, size, new_offset) - - def _read_extended(self, offset): - (next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1]) - type_num = next_byte + 7 - if type_num < 7: - raise InvalidDatabaseError( - 'Something went horribly wrong in the decoder. 
An ' - 'extended type resolved to a type number < 8 ' - '({type})'.format(type=type_num)) - return type_num, offset + 1 - - def _verify_size(self, expected, actual): - if expected != actual: - raise InvalidDatabaseError( - 'The MaxMind DB file\'s data section contains bad data ' - '(unknown data type or corrupt data)' - ) - - def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num): - size = ctrl_byte & 0x1f - if type_num == 1: - return size, offset - bytes_to_read = 0 if size < 29 else size - 28 - - new_offset = offset + bytes_to_read - size_bytes = self._buffer[offset:new_offset] - - # Using unpack rather than int_from_bytes as it is about 200 lookups - # per second faster here. - if size == 29: - size = 29 + struct.unpack(b'!B', size_bytes)[0] - elif size == 30: - size = 285 + struct.unpack(b'!H', size_bytes)[0] - elif size > 30: - size = struct.unpack( - b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821 - - return size, new_offset diff --git a/plugins/Sidebar/maxminddb/errors.py b/plugins/Sidebar/maxminddb/errors.py deleted file mode 100644 index f04ff028..00000000 --- a/plugins/Sidebar/maxminddb/errors.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -maxminddb.errors -~~~~~~~~~~~~~~~~ - -This module contains custom errors for the MaxMind DB reader -""" - - -class InvalidDatabaseError(RuntimeError): - - """This error is thrown when unexpected data is found in the database.""" diff --git a/plugins/Sidebar/maxminddb/extension/maxminddb.c b/plugins/Sidebar/maxminddb/extension/maxminddb.c deleted file mode 100644 index 9e4d45e2..00000000 --- a/plugins/Sidebar/maxminddb/extension/maxminddb.c +++ /dev/null @@ -1,570 +0,0 @@ -#include -#include -#include "structmember.h" - -#define __STDC_FORMAT_MACROS -#include - -static PyTypeObject Reader_Type; -static PyTypeObject Metadata_Type; -static PyObject *MaxMindDB_error; - -typedef struct { - PyObject_HEAD /* no semicolon */ - MMDB_s *mmdb; -} Reader_obj; - -typedef struct { - PyObject_HEAD /* no semicolon */ - PyObject *binary_format_major_version; - PyObject *binary_format_minor_version; - PyObject *build_epoch; - PyObject *database_type; - PyObject *description; - PyObject *ip_version; - PyObject *languages; - PyObject *node_count; - PyObject *record_size; -} Metadata_obj; - -static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list); -static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list); -static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list); -static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list); - -#if PY_MAJOR_VERSION >= 3 - #define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void) - #define RETURN_MOD_INIT(m) return (m) - #define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError -#else - #define MOD_INIT(name) PyMODINIT_FUNC init ## name(void) - #define RETURN_MOD_INIT(m) return - #define PyInt_FromLong PyLong_FromLong - #define FILE_NOT_FOUND_ERROR PyExc_IOError -#endif - -#ifdef __GNUC__ - # define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) -#else - # define UNUSED(x) UNUSED_ ## x -#endif - -static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds) -{ - char *filename; - int mode = 0; - - static char *kwlist[] = {"database", "mode", NULL}; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) { - return -1; - } - - if (mode != 0 && mode != 1) { - PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). 
Only " - "MODE_AUTO and MODE_MMAP_EXT are supported by this extension.", - mode); - return -1; - } - - if (0 != access(filename, R_OK)) { - PyErr_Format(FILE_NOT_FOUND_ERROR, - "No such file or directory: '%s'", - filename); - return -1; - } - - MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s)); - if (NULL == mmdb) { - PyErr_NoMemory(); - return -1; - } - - Reader_obj *mmdb_obj = (Reader_obj *)self; - if (!mmdb_obj) { - free(mmdb); - PyErr_NoMemory(); - return -1; - } - - uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb); - - if (MMDB_SUCCESS != status) { - free(mmdb); - PyErr_Format( - MaxMindDB_error, - "Error opening database file (%s). Is this a valid MaxMind DB file?", - filename - ); - return -1; - } - - mmdb_obj->mmdb = mmdb; - return 0; -} - -static PyObject *Reader_get(PyObject *self, PyObject *args) -{ - char *ip_address = NULL; - - Reader_obj *mmdb_obj = (Reader_obj *)self; - if (!PyArg_ParseTuple(args, "s", &ip_address)) { - return NULL; - } - - MMDB_s *mmdb = mmdb_obj->mmdb; - - if (NULL == mmdb) { - PyErr_SetString(PyExc_ValueError, - "Attempt to read from a closed MaxMind DB."); - return NULL; - } - - int gai_error = 0; - int mmdb_error = MMDB_SUCCESS; - MMDB_lookup_result_s result = - MMDB_lookup_string(mmdb, ip_address, &gai_error, - &mmdb_error); - - if (0 != gai_error) { - PyErr_Format(PyExc_ValueError, - "'%s' does not appear to be an IPv4 or IPv6 address.", - ip_address); - return NULL; - } - - if (MMDB_SUCCESS != mmdb_error) { - PyObject *exception; - if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) { - exception = PyExc_ValueError; - } else { - exception = MaxMindDB_error; - } - PyErr_Format(exception, "Error looking up %s. %s", - ip_address, MMDB_strerror(mmdb_error)); - return NULL; - } - - if (!result.found_entry) { - Py_RETURN_NONE; - } - - MMDB_entry_data_list_s *entry_data_list = NULL; - int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list); - if (MMDB_SUCCESS != status) { - PyErr_Format(MaxMindDB_error, - "Error while looking up data for %s. 
%s", - ip_address, MMDB_strerror(status)); - MMDB_free_entry_data_list(entry_data_list); - return NULL; - } - - MMDB_entry_data_list_s *original_entry_data_list = entry_data_list; - PyObject *py_obj = from_entry_data_list(&entry_data_list); - MMDB_free_entry_data_list(original_entry_data_list); - return py_obj; -} - -static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args)) -{ - Reader_obj *mmdb_obj = (Reader_obj *)self; - - if (NULL == mmdb_obj->mmdb) { - PyErr_SetString(PyExc_IOError, - "Attempt to read from a closed MaxMind DB."); - return NULL; - } - - MMDB_entry_data_list_s *entry_data_list; - MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list); - MMDB_entry_data_list_s *original_entry_data_list = entry_data_list; - - PyObject *metadata_dict = from_entry_data_list(&entry_data_list); - MMDB_free_entry_data_list(original_entry_data_list); - if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) { - PyErr_SetString(MaxMindDB_error, - "Error decoding metadata."); - return NULL; - } - - PyObject *args = PyTuple_New(0); - if (NULL == args) { - Py_DECREF(metadata_dict); - return NULL; - } - - PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args, - metadata_dict); - - Py_DECREF(metadata_dict); - return metadata; -} - -static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args)) -{ - Reader_obj *mmdb_obj = (Reader_obj *)self; - - if (NULL != mmdb_obj->mmdb) { - MMDB_close(mmdb_obj->mmdb); - free(mmdb_obj->mmdb); - mmdb_obj->mmdb = NULL; - } - - Py_RETURN_NONE; -} - -static void Reader_dealloc(PyObject *self) -{ - Reader_obj *obj = (Reader_obj *)self; - if (NULL != obj->mmdb) { - Reader_close(self, NULL); - } - - PyObject_Del(self); -} - -static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds) -{ - - PyObject - *binary_format_major_version, - *binary_format_minor_version, - *build_epoch, - *database_type, - *description, - *ip_version, - *languages, - *node_count, - *record_size; - - static char *kwlist[] = { - "binary_format_major_version", - "binary_format_minor_version", - "build_epoch", - "database_type", - "description", - "ip_version", - "languages", - "node_count", - "record_size", - NULL - }; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist, - &binary_format_major_version, - &binary_format_minor_version, - &build_epoch, - &database_type, - &description, - &ip_version, - &languages, - &node_count, - &record_size)) { - return -1; - } - - Metadata_obj *obj = (Metadata_obj *)self; - - obj->binary_format_major_version = binary_format_major_version; - obj->binary_format_minor_version = binary_format_minor_version; - obj->build_epoch = build_epoch; - obj->database_type = database_type; - obj->description = description; - obj->ip_version = ip_version; - obj->languages = languages; - obj->node_count = node_count; - obj->record_size = record_size; - - Py_INCREF(obj->binary_format_major_version); - Py_INCREF(obj->binary_format_minor_version); - Py_INCREF(obj->build_epoch); - Py_INCREF(obj->database_type); - Py_INCREF(obj->description); - Py_INCREF(obj->ip_version); - Py_INCREF(obj->languages); - Py_INCREF(obj->node_count); - Py_INCREF(obj->record_size); - - return 0; -} - -static void Metadata_dealloc(PyObject *self) -{ - Metadata_obj *obj = (Metadata_obj *)self; - Py_DECREF(obj->binary_format_major_version); - Py_DECREF(obj->binary_format_minor_version); - Py_DECREF(obj->build_epoch); - Py_DECREF(obj->database_type); - Py_DECREF(obj->description); - Py_DECREF(obj->ip_version); - 
Py_DECREF(obj->languages); - Py_DECREF(obj->node_count); - Py_DECREF(obj->record_size); - PyObject_Del(self); -} - -static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list) -{ - if (NULL == entry_data_list || NULL == *entry_data_list) { - PyErr_SetString( - MaxMindDB_error, - "Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb." - ); - return NULL; - } - - switch ((*entry_data_list)->entry_data.type) { - case MMDB_DATA_TYPE_MAP: - return from_map(entry_data_list); - case MMDB_DATA_TYPE_ARRAY: - return from_array(entry_data_list); - case MMDB_DATA_TYPE_UTF8_STRING: - return PyUnicode_FromStringAndSize( - (*entry_data_list)->entry_data.utf8_string, - (*entry_data_list)->entry_data.data_size - ); - case MMDB_DATA_TYPE_BYTES: - return PyByteArray_FromStringAndSize( - (const char *)(*entry_data_list)->entry_data.bytes, - (Py_ssize_t)(*entry_data_list)->entry_data.data_size); - case MMDB_DATA_TYPE_DOUBLE: - return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value); - case MMDB_DATA_TYPE_FLOAT: - return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value); - case MMDB_DATA_TYPE_UINT16: - return PyLong_FromLong( (*entry_data_list)->entry_data.uint16); - case MMDB_DATA_TYPE_UINT32: - return PyLong_FromLong((*entry_data_list)->entry_data.uint32); - case MMDB_DATA_TYPE_BOOLEAN: - return PyBool_FromLong((*entry_data_list)->entry_data.boolean); - case MMDB_DATA_TYPE_UINT64: - return PyLong_FromUnsignedLongLong( - (*entry_data_list)->entry_data.uint64); - case MMDB_DATA_TYPE_UINT128: - return from_uint128(*entry_data_list); - case MMDB_DATA_TYPE_INT32: - return PyLong_FromLong((*entry_data_list)->entry_data.int32); - default: - PyErr_Format(MaxMindDB_error, - "Invalid data type arguments: %d", - (*entry_data_list)->entry_data.type); - return NULL; - } - return NULL; -} - -static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list) -{ - PyObject *py_obj = PyDict_New(); - if (NULL == py_obj) { - PyErr_NoMemory(); - return NULL; - } - - const uint32_t map_size = (*entry_data_list)->entry_data.data_size; - - uint i; - // entry_data_list cannot start out NULL (see from_entry_data_list). We - // check it in the loop because it may become NULL. - // coverity[check_after_deref] - for (i = 0; i < map_size && entry_data_list; i++) { - *entry_data_list = (*entry_data_list)->next; - - PyObject *key = PyUnicode_FromStringAndSize( - (char *)(*entry_data_list)->entry_data.utf8_string, - (*entry_data_list)->entry_data.data_size - ); - - *entry_data_list = (*entry_data_list)->next; - - PyObject *value = from_entry_data_list(entry_data_list); - if (NULL == value) { - Py_DECREF(key); - Py_DECREF(py_obj); - return NULL; - } - PyDict_SetItem(py_obj, key, value); - Py_DECREF(value); - Py_DECREF(key); - } - - return py_obj; -} - -static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list) -{ - const uint32_t size = (*entry_data_list)->entry_data.data_size; - - PyObject *py_obj = PyList_New(size); - if (NULL == py_obj) { - PyErr_NoMemory(); - return NULL; - } - - uint i; - // entry_data_list cannot start out NULL (see from_entry_data_list). We - // check it in the loop because it may become NULL. 
- // coverity[check_after_deref] - for (i = 0; i < size && entry_data_list; i++) { - *entry_data_list = (*entry_data_list)->next; - PyObject *value = from_entry_data_list(entry_data_list); - if (NULL == value) { - Py_DECREF(py_obj); - return NULL; - } - // PyList_SetItem 'steals' the reference - PyList_SetItem(py_obj, i, value); - } - return py_obj; -} - -static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list) -{ - uint64_t high = 0; - uint64_t low = 0; -#if MMDB_UINT128_IS_BYTE_ARRAY - int i; - for (i = 0; i < 8; i++) { - high = (high << 8) | entry_data_list->entry_data.uint128[i]; - } - - for (i = 8; i < 16; i++) { - low = (low << 8) | entry_data_list->entry_data.uint128[i]; - } -#else - high = entry_data_list->entry_data.uint128 >> 64; - low = (uint64_t)entry_data_list->entry_data.uint128; -#endif - - char *num_str = malloc(33); - if (NULL == num_str) { - PyErr_NoMemory(); - return NULL; - } - - snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low); - - PyObject *py_obj = PyLong_FromString(num_str, NULL, 16); - - free(num_str); - return py_obj; -} - -static PyMethodDef Reader_methods[] = { - { "get", Reader_get, METH_VARARGS, - "Get record for IP address" }, - { "metadata", Reader_metadata, METH_NOARGS, - "Returns metadata object for database" }, - { "close", Reader_close, METH_NOARGS, "Closes database"}, - { NULL, NULL, 0, NULL } -}; - -static PyTypeObject Reader_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_basicsize = sizeof(Reader_obj), - .tp_dealloc = Reader_dealloc, - .tp_doc = "Reader object", - .tp_flags = Py_TPFLAGS_DEFAULT, - .tp_methods = Reader_methods, - .tp_name = "Reader", - .tp_init = Reader_init, -}; - -static PyMethodDef Metadata_methods[] = { - { NULL, NULL, 0, NULL } -}; - -/* *INDENT-OFF* */ -static PyMemberDef Metadata_members[] = { - { "binary_format_major_version", T_OBJECT, offsetof( - Metadata_obj, binary_format_major_version), READONLY, NULL }, - { "binary_format_minor_version", T_OBJECT, offsetof( - Metadata_obj, binary_format_minor_version), READONLY, NULL }, - { "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch), - READONLY, NULL }, - { "database_type", T_OBJECT, offsetof(Metadata_obj, database_type), - READONLY, NULL }, - { "description", T_OBJECT, offsetof(Metadata_obj, description), - READONLY, NULL }, - { "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version), - READONLY, NULL }, - { "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY, - NULL }, - { "node_count", T_OBJECT, offsetof(Metadata_obj, node_count), - READONLY, NULL }, - { "record_size", T_OBJECT, offsetof(Metadata_obj, record_size), - READONLY, NULL }, - { NULL, 0, 0, 0, NULL } -}; -/* *INDENT-ON* */ - -static PyTypeObject Metadata_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - .tp_basicsize = sizeof(Metadata_obj), - .tp_dealloc = Metadata_dealloc, - .tp_doc = "Metadata object", - .tp_flags = Py_TPFLAGS_DEFAULT, - .tp_members = Metadata_members, - .tp_methods = Metadata_methods, - .tp_name = "Metadata", - .tp_init = Metadata_init -}; - -static PyMethodDef MaxMindDB_methods[] = { - { NULL, NULL, 0, NULL } -}; - - -#if PY_MAJOR_VERSION >= 3 -static struct PyModuleDef MaxMindDB_module = { - PyModuleDef_HEAD_INIT, - .m_name = "extension", - .m_doc = "This is a C extension to read MaxMind DB file format", - .m_methods = MaxMindDB_methods, -}; -#endif - -MOD_INIT(extension){ - PyObject *m; - -#if PY_MAJOR_VERSION >= 3 - m = PyModule_Create(&MaxMindDB_module); -#else - m = Py_InitModule("extension", MaxMindDB_methods); -#endif - - if (!m) { - 
RETURN_MOD_INIT(NULL); - } - - Reader_Type.tp_new = PyType_GenericNew; - if (PyType_Ready(&Reader_Type)) { - RETURN_MOD_INIT(NULL); - } - Py_INCREF(&Reader_Type); - PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type); - - Metadata_Type.tp_new = PyType_GenericNew; - if (PyType_Ready(&Metadata_Type)) { - RETURN_MOD_INIT(NULL); - } - PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type); - - PyObject* error_mod = PyImport_ImportModule("maxminddb.errors"); - if (error_mod == NULL) { - RETURN_MOD_INIT(NULL); - } - - MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError"); - Py_DECREF(error_mod); - - if (MaxMindDB_error == NULL) { - RETURN_MOD_INIT(NULL); - } - - Py_INCREF(MaxMindDB_error); - - /* We primarily add it to the module for backwards compatibility */ - PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error); - - RETURN_MOD_INIT(m); -} diff --git a/plugins/Sidebar/maxminddb/file.py b/plugins/Sidebar/maxminddb/file.py deleted file mode 100644 index 3460893e..00000000 --- a/plugins/Sidebar/maxminddb/file.py +++ /dev/null @@ -1,65 +0,0 @@ -"""For internal use only. It provides a slice-like file reader.""" - -import os - -try: - from multiprocessing import Lock -except ImportError: - from threading import Lock - - -class FileBuffer(object): - - """A slice-able file reader""" - - def __init__(self, database): - self._handle = open(database, 'rb') - self._size = os.fstat(self._handle.fileno()).st_size - if not hasattr(os, 'pread'): - self._lock = Lock() - - def __getitem__(self, key): - if isinstance(key, slice): - return self._read(key.stop - key.start, key.start) - elif isinstance(key, int): - return self._read(1, key) - else: - raise TypeError("Invalid argument type.") - - def rfind(self, needle, start): - """Reverse find needle from start""" - pos = self._read(self._size - start - 1, start).rfind(needle) - if pos == -1: - return pos - return start + pos - - def size(self): - """Size of file""" - return self._size - - def close(self): - """Close file""" - self._handle.close() - - if hasattr(os, 'pread'): - - def _read(self, buffersize, offset): - """read that uses pread""" - # pylint: disable=no-member - return os.pread(self._handle.fileno(), buffersize, offset) - - else: - - def _read(self, buffersize, offset): - """read with a lock - - This lock is necessary as after a fork, the different processes - will share the same file table entry, even if we dup the fd, and - as such the same offsets. There does not appear to be a way to - duplicate the file table entry and we cannot re-open based on the - original path as that file may have replaced with another or - unlinked. - """ - with self._lock: - self._handle.seek(offset) - return self._handle.read(buffersize) diff --git a/plugins/Sidebar/maxminddb/ipaddr.py b/plugins/Sidebar/maxminddb/ipaddr.py deleted file mode 100644 index ad27ae9d..00000000 --- a/plugins/Sidebar/maxminddb/ipaddr.py +++ /dev/null @@ -1,1897 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2007 Google Inc. -# Licensed to PSF under a Contributor Agreement. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
See the License for the specific language governing -# permissions and limitations under the License. - -"""A fast, lightweight IPv4/IPv6 manipulation library in Python. - -This library is used to create/poke/manipulate IPv4 and IPv6 addresses -and networks. - -""" - -__version__ = '2.1.10' - -import struct - -IPV4LENGTH = 32 -IPV6LENGTH = 128 - - -class AddressValueError(ValueError): - """A Value Error related to the address.""" - - -class NetmaskValueError(ValueError): - """A Value Error related to the netmask.""" - - -def IPAddress(address, version=None): - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - version: An Integer, 4 or 6. If set, don't try to automatically - determine what the IP address type is. important for things - like IPAddress(1), which could be IPv4, '0.0.0.1', or IPv6, - '::1'. - - Returns: - An IPv4Address or IPv6Address object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. - - """ - if version: - if version == 4: - return IPv4Address(address) - elif version == 6: - return IPv6Address(address) - - try: - return IPv4Address(address) - except (AddressValueError, NetmaskValueError): - pass - - try: - return IPv6Address(address) - except (AddressValueError, NetmaskValueError): - pass - - raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % - address) - - -def IPNetwork(address, version=None, strict=False): - """Take an IP string/int and return an object of the correct type. - - Args: - address: A string or integer, the IP address. Either IPv4 or - IPv6 addresses may be supplied; integers less than 2**32 will - be considered to be IPv4 by default. - version: An Integer, if set, don't try to automatically - determine what the IP address type is. important for things - like IPNetwork(1), which could be IPv4, '0.0.0.1/32', or IPv6, - '::1/128'. - - Returns: - An IPv4Network or IPv6Network object. - - Raises: - ValueError: if the string passed isn't either a v4 or a v6 - address. Or if a strict network was requested and a strict - network wasn't given. - - """ - if version: - if version == 4: - return IPv4Network(address, strict) - elif version == 6: - return IPv6Network(address, strict) - - try: - return IPv4Network(address, strict) - except (AddressValueError, NetmaskValueError): - pass - - try: - return IPv6Network(address, strict) - except (AddressValueError, NetmaskValueError): - pass - - raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % - address) - - -def v4_int_to_packed(address): - """The binary representation of this address. - - Args: - address: An integer representation of an IPv4 IP address. - - Returns: - The binary representation of this address. - - Raises: - ValueError: If the integer is too large to be an IPv4 IP - address. - """ - if address > _BaseV4._ALL_ONES: - raise ValueError('Address too large for IPv4') - return Bytes(struct.pack('!I', address)) - - -def v6_int_to_packed(address): - """The binary representation of this address. - - Args: - address: An integer representation of an IPv4 IP address. - - Returns: - The binary representation of this address. - """ - return Bytes(struct.pack('!QQ', address >> 64, address & (2**64 - 1))) - - -def _find_address_range(addresses): - """Find a sequence of addresses. - - Args: - addresses: a list of IPv4 or IPv6 addresses. 
- - Returns: - A tuple containing the first and last IP addresses in the sequence. - - """ - first = last = addresses[0] - for ip in addresses[1:]: - if ip._ip == last._ip + 1: - last = ip - else: - break - return (first, last) - -def _get_prefix_length(number1, number2, bits): - """Get the number of leading bits that are same for two numbers. - - Args: - number1: an integer. - number2: another integer. - bits: the maximum number of bits to compare. - - Returns: - The number of leading bits that are the same for two numbers. - - """ - for i in range(bits): - if number1 >> i == number2 >> i: - return bits - i - return 0 - -def _count_righthand_zero_bits(number, bits): - """Count the number of zero bits on the right hand side. - - Args: - number: an integer. - bits: maximum number of bits to count. - - Returns: - The number of zero bits on the right hand side of the number. - - """ - if number == 0: - return bits - for i in range(bits): - if (number >> i) % 2: - return i - -def summarize_address_range(first, last): - """Summarize a network range given the first and last IP addresses. - - Example: - >>> summarize_address_range(IPv4Address('1.1.1.0'), - IPv4Address('1.1.1.130')) - [IPv4Network('1.1.1.0/25'), IPv4Network('1.1.1.128/31'), - IPv4Network('1.1.1.130/32')] - - Args: - first: the first IPv4Address or IPv6Address in the range. - last: the last IPv4Address or IPv6Address in the range. - - Returns: - The address range collapsed to a list of IPv4Network's or - IPv6Network's. - - Raise: - TypeError: - If the first and last objects are not IP addresses. - If the first and last objects are not the same version. - ValueError: - If the last object is not greater than the first. - If the version is not 4 or 6. - - """ - if not (isinstance(first, _BaseIP) and isinstance(last, _BaseIP)): - raise TypeError('first and last must be IP addresses, not networks') - if first.version != last.version: - raise TypeError("%s and %s are not of the same version" % ( - str(first), str(last))) - if first > last: - raise ValueError('last IP address must be greater than first') - - networks = [] - - if first.version == 4: - ip = IPv4Network - elif first.version == 6: - ip = IPv6Network - else: - raise ValueError('unknown IP version') - - ip_bits = first._max_prefixlen - first_int = first._ip - last_int = last._ip - while first_int <= last_int: - nbits = _count_righthand_zero_bits(first_int, ip_bits) - current = None - while nbits >= 0: - addend = 2**nbits - 1 - current = first_int + addend - nbits -= 1 - if current <= last_int: - break - prefix = _get_prefix_length(first_int, current, ip_bits) - net = ip('%s/%d' % (str(first), prefix)) - networks.append(net) - if current == ip._ALL_ONES: - break - first_int = current + 1 - first = IPAddress(first_int, version=first._version) - return networks - -def _collapse_address_list_recursive(addresses): - """Loops through the addresses, collapsing concurrent netblocks. - - Example: - - ip1 = IPv4Network('1.1.0.0/24') - ip2 = IPv4Network('1.1.1.0/24') - ip3 = IPv4Network('1.1.2.0/24') - ip4 = IPv4Network('1.1.3.0/24') - ip5 = IPv4Network('1.1.4.0/24') - ip6 = IPv4Network('1.1.0.1/22') - - _collapse_address_list_recursive([ip1, ip2, ip3, ip4, ip5, ip6]) -> - [IPv4Network('1.1.0.0/22'), IPv4Network('1.1.4.0/24')] - - This shouldn't be called directly; it is called via - collapse_address_list([]). - - Args: - addresses: A list of IPv4Network's or IPv6Network's - - Returns: - A list of IPv4Network's or IPv6Network's depending on what we were - passed. 
- - """ - ret_array = [] - optimized = False - - for cur_addr in addresses: - if not ret_array: - ret_array.append(cur_addr) - continue - if cur_addr in ret_array[-1]: - optimized = True - elif cur_addr == ret_array[-1].supernet().subnet()[1]: - ret_array.append(ret_array.pop().supernet()) - optimized = True - else: - ret_array.append(cur_addr) - - if optimized: - return _collapse_address_list_recursive(ret_array) - - return ret_array - - -def collapse_address_list(addresses): - """Collapse a list of IP objects. - - Example: - collapse_address_list([IPv4('1.1.0.0/24'), IPv4('1.1.1.0/24')]) -> - [IPv4('1.1.0.0/23')] - - Args: - addresses: A list of IPv4Network or IPv6Network objects. - - Returns: - A list of IPv4Network or IPv6Network objects depending on what we - were passed. - - Raises: - TypeError: If passed a list of mixed version objects. - - """ - i = 0 - addrs = [] - ips = [] - nets = [] - - # split IP addresses and networks - for ip in addresses: - if isinstance(ip, _BaseIP): - if ips and ips[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) - ips.append(ip) - elif ip._prefixlen == ip._max_prefixlen: - if ips and ips[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) - ips.append(ip.ip) - else: - if nets and nets[-1]._version != ip._version: - raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) - nets.append(ip) - - # sort and dedup - ips = sorted(set(ips)) - nets = sorted(set(nets)) - - while i < len(ips): - (first, last) = _find_address_range(ips[i:]) - i = ips.index(last) + 1 - addrs.extend(summarize_address_range(first, last)) - - return _collapse_address_list_recursive(sorted( - addrs + nets, key=_BaseNet._get_networks_key)) - -# backwards compatibility -CollapseAddrList = collapse_address_list - -# We need to distinguish between the string and packed-bytes representations -# of an IP address. For example, b'0::1' is the IPv4 address 48.58.58.49, -# while '0::1' is an IPv6 address. -# -# In Python 3, the native 'bytes' type already provides this functionality, -# so we use it directly. For earlier implementations where bytes is not a -# distinct type, we create a subclass of str to serve as a tag. -# -# Usage example (Python 2): -# ip = ipaddr.IPAddress(ipaddr.Bytes('xxxx')) -# -# Usage example (Python 3): -# ip = ipaddr.IPAddress(b'xxxx') -try: - if bytes is str: - raise TypeError("bytes is not a distinct type") - Bytes = bytes -except (NameError, TypeError): - class Bytes(str): - def __repr__(self): - return 'Bytes(%s)' % str.__repr__(self) - -def get_mixed_type_key(obj): - """Return a key suitable for sorting between networks and addresses. - - Address and Network objects are not sortable by default; they're - fundamentally different so the expression - - IPv4Address('1.1.1.1') <= IPv4Network('1.1.1.1/24') - - doesn't make any sense. There are some times however, where you may wish - to have ipaddr sort these for you anyway. If you need to do this, you - can use this function as the key= argument to sorted(). - - Args: - obj: either a Network or Address object. - Returns: - appropriate key. 
- - """ - if isinstance(obj, _BaseNet): - return obj._get_networks_key() - elif isinstance(obj, _BaseIP): - return obj._get_address_key() - return NotImplemented - -class _IPAddrBase(object): - - """The mother class.""" - - def __index__(self): - return self._ip - - def __int__(self): - return self._ip - - def __hex__(self): - return hex(self._ip) - - @property - def exploded(self): - """Return the longhand version of the IP address as a string.""" - return self._explode_shorthand_ip_string() - - @property - def compressed(self): - """Return the shorthand version of the IP address as a string.""" - return str(self) - - -class _BaseIP(_IPAddrBase): - - """A generic IP object. - - This IP class contains the version independent methods which are - used by single IP addresses. - - """ - - def __eq__(self, other): - try: - return (self._ip == other._ip - and self._version == other._version) - except AttributeError: - return NotImplemented - - def __ne__(self, other): - eq = self.__eq__(other) - if eq is NotImplemented: - return NotImplemented - return not eq - - def __le__(self, other): - gt = self.__gt__(other) - if gt is NotImplemented: - return NotImplemented - return not gt - - def __ge__(self, other): - lt = self.__lt__(other) - if lt is NotImplemented: - return NotImplemented - return not lt - - def __lt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseIP): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self._ip != other._ip: - return self._ip < other._ip - return False - - def __gt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseIP): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self._ip != other._ip: - return self._ip > other._ip - return False - - # Shorthand for Integer addition and subtraction. This is not - # meant to ever support addition/subtraction of addresses. - def __add__(self, other): - if not isinstance(other, int): - return NotImplemented - return IPAddress(int(self) + other, version=self._version) - - def __sub__(self, other): - if not isinstance(other, int): - return NotImplemented - return IPAddress(int(self) - other, version=self._version) - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, str(self)) - - def __str__(self): - return '%s' % self._string_from_ip_int(self._ip) - - def __hash__(self): - return hash(hex(long(self._ip))) - - def _get_address_key(self): - return (self._version, self) - - @property - def version(self): - raise NotImplementedError('BaseIP has no version') - - -class _BaseNet(_IPAddrBase): - - """A generic IP object. - - This IP class contains the version independent methods which are - used by networks. - - """ - - def __init__(self, address): - self._cache = {} - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, str(self)) - - def iterhosts(self): - """Generate Iterator over usable hosts in a network. - - This is like __iter__ except it doesn't return the network - or broadcast addresses. 
- - """ - cur = int(self.network) + 1 - bcast = int(self.broadcast) - 1 - while cur <= bcast: - cur += 1 - yield IPAddress(cur - 1, version=self._version) - - def __iter__(self): - cur = int(self.network) - bcast = int(self.broadcast) - while cur <= bcast: - cur += 1 - yield IPAddress(cur - 1, version=self._version) - - def __getitem__(self, n): - network = int(self.network) - broadcast = int(self.broadcast) - if n >= 0: - if network + n > broadcast: - raise IndexError - return IPAddress(network + n, version=self._version) - else: - n += 1 - if broadcast + n < network: - raise IndexError - return IPAddress(broadcast + n, version=self._version) - - def __lt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseNet): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self.network != other.network: - return self.network < other.network - if self.netmask != other.netmask: - return self.netmask < other.netmask - return False - - def __gt__(self, other): - if self._version != other._version: - raise TypeError('%s and %s are not of the same version' % ( - str(self), str(other))) - if not isinstance(other, _BaseNet): - raise TypeError('%s and %s are not of the same type' % ( - str(self), str(other))) - if self.network != other.network: - return self.network > other.network - if self.netmask != other.netmask: - return self.netmask > other.netmask - return False - - def __le__(self, other): - gt = self.__gt__(other) - if gt is NotImplemented: - return NotImplemented - return not gt - - def __ge__(self, other): - lt = self.__lt__(other) - if lt is NotImplemented: - return NotImplemented - return not lt - - def __eq__(self, other): - try: - return (self._version == other._version - and self.network == other.network - and int(self.netmask) == int(other.netmask)) - except AttributeError: - if isinstance(other, _BaseIP): - return (self._version == other._version - and self._ip == other._ip) - - def __ne__(self, other): - eq = self.__eq__(other) - if eq is NotImplemented: - return NotImplemented - return not eq - - def __str__(self): - return '%s/%s' % (str(self.ip), - str(self._prefixlen)) - - def __hash__(self): - return hash(int(self.network) ^ int(self.netmask)) - - def __contains__(self, other): - # always false if one is v4 and the other is v6. - if self._version != other._version: - return False - # dealing with another network. 
- if isinstance(other, _BaseNet): - return (self.network <= other.network and - self.broadcast >= other.broadcast) - # dealing with another address - else: - return (int(self.network) <= int(other._ip) <= - int(self.broadcast)) - - def overlaps(self, other): - """Tell if self is partly contained in other.""" - return self.network in other or self.broadcast in other or ( - other.network in self or other.broadcast in self) - - @property - def network(self): - x = self._cache.get('network') - if x is None: - x = IPAddress(self._ip & int(self.netmask), version=self._version) - self._cache['network'] = x - return x - - @property - def broadcast(self): - x = self._cache.get('broadcast') - if x is None: - x = IPAddress(self._ip | int(self.hostmask), version=self._version) - self._cache['broadcast'] = x - return x - - @property - def hostmask(self): - x = self._cache.get('hostmask') - if x is None: - x = IPAddress(int(self.netmask) ^ self._ALL_ONES, - version=self._version) - self._cache['hostmask'] = x - return x - - @property - def with_prefixlen(self): - return '%s/%d' % (str(self.ip), self._prefixlen) - - @property - def with_netmask(self): - return '%s/%s' % (str(self.ip), str(self.netmask)) - - @property - def with_hostmask(self): - return '%s/%s' % (str(self.ip), str(self.hostmask)) - - @property - def numhosts(self): - """Number of hosts in the current subnet.""" - return int(self.broadcast) - int(self.network) + 1 - - @property - def version(self): - raise NotImplementedError('BaseNet has no version') - - @property - def prefixlen(self): - return self._prefixlen - - def address_exclude(self, other): - """Remove an address from a larger block. - - For example: - - addr1 = IPNetwork('10.1.1.0/24') - addr2 = IPNetwork('10.1.1.0/26') - addr1.address_exclude(addr2) = - [IPNetwork('10.1.1.64/26'), IPNetwork('10.1.1.128/25')] - - or IPv6: - - addr1 = IPNetwork('::1/32') - addr2 = IPNetwork('::1/128') - addr1.address_exclude(addr2) = [IPNetwork('::0/128'), - IPNetwork('::2/127'), - IPNetwork('::4/126'), - IPNetwork('::8/125'), - ... - IPNetwork('0:0:8000::/33')] - - Args: - other: An IPvXNetwork object of the same type. - - Returns: - A sorted list of IPvXNetwork objects addresses which is self - minus other. - - Raises: - TypeError: If self and other are of difffering address - versions, or if other is not a network object. - ValueError: If other is not completely contained by self. - - """ - if not self._version == other._version: - raise TypeError("%s and %s are not of the same version" % ( - str(self), str(other))) - - if not isinstance(other, _BaseNet): - raise TypeError("%s is not a network object" % str(other)) - - if other not in self: - raise ValueError('%s not contained in %s' % (str(other), - str(self))) - if other == self: - return [] - - ret_addrs = [] - - # Make sure we're comparing the network of other. - other = IPNetwork('%s/%s' % (str(other.network), str(other.prefixlen)), - version=other._version) - - s1, s2 = self.subnet() - while s1 != other and s2 != other: - if other in s1: - ret_addrs.append(s2) - s1, s2 = s1.subnet() - elif other in s2: - ret_addrs.append(s1) - s1, s2 = s2.subnet() - else: - # If we got here, there's a bug somewhere. - assert True == False, ('Error performing exclusion: ' - 's1: %s s2: %s other: %s' % - (str(s1), str(s2), str(other))) - if s1 == other: - ret_addrs.append(s2) - elif s2 == other: - ret_addrs.append(s1) - else: - # If we got here, there's a bug somewhere. 
- assert True == False, ('Error performing exclusion: ' - 's1: %s s2: %s other: %s' % - (str(s1), str(s2), str(other))) - - return sorted(ret_addrs, key=_BaseNet._get_networks_key) - - def compare_networks(self, other): - """Compare two IP objects. - - This is only concerned about the comparison of the integer - representation of the network addresses. This means that the - host bits aren't considered at all in this method. If you want - to compare host bits, you can easily enough do a - 'HostA._ip < HostB._ip' - - Args: - other: An IP object. - - Returns: - If the IP versions of self and other are the same, returns: - - -1 if self < other: - eg: IPv4('1.1.1.0/24') < IPv4('1.1.2.0/24') - IPv6('1080::200C:417A') < IPv6('1080::200B:417B') - 0 if self == other - eg: IPv4('1.1.1.1/24') == IPv4('1.1.1.2/24') - IPv6('1080::200C:417A/96') == IPv6('1080::200C:417B/96') - 1 if self > other - eg: IPv4('1.1.1.0/24') > IPv4('1.1.0.0/24') - IPv6('1080::1:200C:417A/112') > - IPv6('1080::0:200C:417A/112') - - If the IP versions of self and other are different, returns: - - -1 if self._version < other._version - eg: IPv4('10.0.0.1/24') < IPv6('::1/128') - 1 if self._version > other._version - eg: IPv6('::1/128') > IPv4('255.255.255.0/24') - - """ - if self._version < other._version: - return -1 - if self._version > other._version: - return 1 - # self._version == other._version below here: - if self.network < other.network: - return -1 - if self.network > other.network: - return 1 - # self.network == other.network below here: - if self.netmask < other.netmask: - return -1 - if self.netmask > other.netmask: - return 1 - # self.network == other.network and self.netmask == other.netmask - return 0 - - def _get_networks_key(self): - """Network-only key function. - - Returns an object that identifies this address' network and - netmask. This function is a suitable "key" argument for sorted() - and list.sort(). - - """ - return (self._version, self.network, self.netmask) - - def _ip_int_from_prefix(self, prefixlen=None): - """Turn the prefix length netmask into a int for comparison. - - Args: - prefixlen: An integer, the prefix length. - - Returns: - An integer. - - """ - if not prefixlen and prefixlen != 0: - prefixlen = self._prefixlen - return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen) - - def _prefix_from_ip_int(self, ip_int, mask=32): - """Return prefix length from the decimal netmask. - - Args: - ip_int: An integer, the IP address. - mask: The netmask. Defaults to 32. - - Returns: - An integer, the prefix length. - - """ - while mask: - if ip_int & 1 == 1: - break - ip_int >>= 1 - mask -= 1 - - return mask - - def _ip_string_from_prefix(self, prefixlen=None): - """Turn a prefix length into a dotted decimal string. - - Args: - prefixlen: An integer, the netmask prefix length. - - Returns: - A string, the dotted decimal netmask string. - - """ - if not prefixlen: - prefixlen = self._prefixlen - return self._string_from_ip_int(self._ip_int_from_prefix(prefixlen)) - - def iter_subnets(self, prefixlen_diff=1, new_prefix=None): - """The subnets which join to make the current subnet. - - In the case that self contains only one IP - (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 - for IPv6), return a list with just ourself. - - Args: - prefixlen_diff: An integer, the amount the prefix length - should be increased by. This should not be set if - new_prefix is also set. - new_prefix: The desired new prefix length. This must be a - larger number (smaller prefix) than the existing prefix. 
- This should not be set if prefixlen_diff is also set. - - Returns: - An iterator of IPv(4|6) objects. - - Raises: - ValueError: The prefixlen_diff is too small or too large. - OR - prefixlen_diff and new_prefix are both set or new_prefix - is a smaller number than the current prefix (smaller - number means a larger network) - - """ - if self._prefixlen == self._max_prefixlen: - yield self - return - - if new_prefix is not None: - if new_prefix < self._prefixlen: - raise ValueError('new prefix must be longer') - if prefixlen_diff != 1: - raise ValueError('cannot set prefixlen_diff and new_prefix') - prefixlen_diff = new_prefix - self._prefixlen - - if prefixlen_diff < 0: - raise ValueError('prefix length diff must be > 0') - new_prefixlen = self._prefixlen + prefixlen_diff - - if not self._is_valid_netmask(str(new_prefixlen)): - raise ValueError( - 'prefix length diff %d is invalid for netblock %s' % ( - new_prefixlen, str(self))) - - first = IPNetwork('%s/%s' % (str(self.network), - str(self._prefixlen + prefixlen_diff)), - version=self._version) - - yield first - current = first - while True: - broadcast = current.broadcast - if broadcast == self.broadcast: - return - new_addr = IPAddress(int(broadcast) + 1, version=self._version) - current = IPNetwork('%s/%s' % (str(new_addr), str(new_prefixlen)), - version=self._version) - - yield current - - def masked(self): - """Return the network object with the host bits masked out.""" - return IPNetwork('%s/%d' % (self.network, self._prefixlen), - version=self._version) - - def subnet(self, prefixlen_diff=1, new_prefix=None): - """Return a list of subnets, rather than an iterator.""" - return list(self.iter_subnets(prefixlen_diff, new_prefix)) - - def supernet(self, prefixlen_diff=1, new_prefix=None): - """The supernet containing the current network. - - Args: - prefixlen_diff: An integer, the amount the prefix length of - the network should be decreased by. For example, given a - /24 network and a prefixlen_diff of 3, a supernet with a - /21 netmask is returned. - - Returns: - An IPv4 network object. - - Raises: - ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have a - negative prefix length. - OR - If prefixlen_diff and new_prefix are both set or new_prefix is a - larger number than the current prefix (larger number means a - smaller network) - - """ - if self._prefixlen == 0: - return self - - if new_prefix is not None: - if new_prefix > self._prefixlen: - raise ValueError('new prefix must be shorter') - if prefixlen_diff != 1: - raise ValueError('cannot set prefixlen_diff and new_prefix') - prefixlen_diff = self._prefixlen - new_prefix - - - if self.prefixlen - prefixlen_diff < 0: - raise ValueError( - 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % - (self.prefixlen, prefixlen_diff)) - return IPNetwork('%s/%s' % (str(self.network), - str(self.prefixlen - prefixlen_diff)), - version=self._version) - - # backwards compatibility - Subnet = subnet - Supernet = supernet - AddressExclude = address_exclude - CompareNetworks = compare_networks - Contains = __contains__ - - -class _BaseV4(object): - - """Base IPv4 object. - - The following methods are used by IPv4 objects in both single IP - addresses and networks. - - """ - - # Equivalent to 255.255.255.255 or 32 bits of 1's. 
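subnet()/iter_subnets() and supernet() above only move the prefix length and recompute the network address; a rough sketch of a single split with plain integers (illustrative helper name, 32-bit addresses assumed):

def split_once(network_int, prefixlen, max_prefixlen=32):
    # One extra prefix bit halves the block; the second half starts right
    # after the first half's broadcast address, as iter_subnets does above.
    if prefixlen >= max_prefixlen:
        return [(network_int, prefixlen)]
    new_prefixlen = prefixlen + 1
    half_size = 1 << (max_prefixlen - new_prefixlen)
    return [(network_int, new_prefixlen), (network_int + half_size, new_prefixlen)]

# 10.1.1.0/24 -> 10.1.1.0/25 and 10.1.1.128/25
assert split_once(0x0A010100, 24) == [(0x0A010100, 25), (0x0A010180, 25)]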
- _ALL_ONES = (2**IPV4LENGTH) - 1 - _DECIMAL_DIGITS = frozenset('0123456789') - - def __init__(self, address): - self._version = 4 - self._max_prefixlen = IPV4LENGTH - - def _explode_shorthand_ip_string(self): - return str(self) - - def _ip_int_from_string(self, ip_str): - """Turn the given IP string into an integer for comparison. - - Args: - ip_str: A string, the IP ip_str. - - Returns: - The IP ip_str as an integer. - - Raises: - AddressValueError: if ip_str isn't a valid IPv4 Address. - - """ - octets = ip_str.split('.') - if len(octets) != 4: - raise AddressValueError(ip_str) - - packed_ip = 0 - for oc in octets: - try: - packed_ip = (packed_ip << 8) | self._parse_octet(oc) - except ValueError: - raise AddressValueError(ip_str) - return packed_ip - - def _parse_octet(self, octet_str): - """Convert a decimal octet into an integer. - - Args: - octet_str: A string, the number to parse. - - Returns: - The octet as an integer. - - Raises: - ValueError: if the octet isn't strictly a decimal from [0..255]. - - """ - # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not self._DECIMAL_DIGITS.issuperset(octet_str): - raise ValueError - octet_int = int(octet_str, 10) - # Disallow leading zeroes, because no clear standard exists on - # whether these should be interpreted as decimal or octal. - if octet_int > 255 or (octet_str[0] == '0' and len(octet_str) > 1): - raise ValueError - return octet_int - - def _string_from_ip_int(self, ip_int): - """Turns a 32-bit integer into dotted decimal notation. - - Args: - ip_int: An integer, the IP address. - - Returns: - The IP address as a string in dotted decimal notation. - - """ - octets = [] - for _ in xrange(4): - octets.insert(0, str(ip_int & 0xFF)) - ip_int >>= 8 - return '.'.join(octets) - - @property - def max_prefixlen(self): - return self._max_prefixlen - - @property - def packed(self): - """The binary representation of this address.""" - return v4_int_to_packed(self._ip) - - @property - def version(self): - return self._version - - @property - def is_reserved(self): - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within the - reserved IPv4 Network range. - - """ - return self in IPv4Network('240.0.0.0/4') - - @property - def is_private(self): - """Test if this address is allocated for private networks. - - Returns: - A boolean, True if the address is reserved per RFC 1918. - - """ - return (self in IPv4Network('10.0.0.0/8') or - self in IPv4Network('172.16.0.0/12') or - self in IPv4Network('192.168.0.0/16')) - - @property - def is_multicast(self): - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is multicast. - See RFC 3171 for details. - - """ - return self in IPv4Network('224.0.0.0/4') - - @property - def is_unspecified(self): - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 5735 3. - - """ - return self in IPv4Network('0.0.0.0') - - @property - def is_loopback(self): - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback per RFC 3330. - - """ - return self in IPv4Network('127.0.0.0/8') - - @property - def is_link_local(self): - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is link-local per RFC 3927. 
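_ip_int_from_string, _parse_octet and _string_from_ip_int above shuttle between dotted-quad strings and 32-bit integers one octet at a time. A compact standalone sketch of the same round trip (illustrative names; the leading-zero and character-whitelist checks from _parse_octet are omitted):

def dotted_to_int(ip_str):
    octets = ip_str.split('.')
    if len(octets) != 4:
        raise ValueError(ip_str)
    value = 0
    for octet in octets:
        number = int(octet, 10)
        if not 0 <= number <= 255:
            raise ValueError(ip_str)
        value = (value << 8) | number      # shift in one octet at a time
    return value

def int_to_dotted(value):
    return '.'.join(str((value >> shift) & 0xFF) for shift in (24, 16, 8, 0))

assert dotted_to_int('192.168.1.1') == 0xC0A80101
assert int_to_dotted(0xC0A80101) == '192.168.1.1'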
- - """ - return self in IPv4Network('169.254.0.0/16') - - -class IPv4Address(_BaseV4, _BaseIP): - - """Represent and manipulate single IPv4 Addresses.""" - - def __init__(self, address): - - """ - Args: - address: A string or integer representing the IP - '192.168.1.1' - - Additionally, an integer can be passed, so - IPv4Address('192.168.1.1') == IPv4Address(3232235777). - or, more generally - IPv4Address(int(IPv4Address('192.168.1.1'))) == - IPv4Address('192.168.1.1') - - Raises: - AddressValueError: If ipaddr isn't a valid IPv4 address. - - """ - _BaseV4.__init__(self, address) - - # Efficient constructor from integer. - if isinstance(address, (int, long)): - self._ip = address - if address < 0 or address > self._ALL_ONES: - raise AddressValueError(address) - return - - # Constructing from a packed address - if isinstance(address, Bytes): - try: - self._ip, = struct.unpack('!I', address) - except struct.error: - raise AddressValueError(address) # Wrong length. - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP string. - addr_str = str(address) - self._ip = self._ip_int_from_string(addr_str) - - -class IPv4Network(_BaseV4, _BaseNet): - - """This class represents and manipulates 32-bit IPv4 networks. - - Attributes: [examples for IPv4Network('1.2.3.4/27')] - ._ip: 16909060 - .ip: IPv4Address('1.2.3.4') - .network: IPv4Address('1.2.3.0') - .hostmask: IPv4Address('0.0.0.31') - .broadcast: IPv4Address('1.2.3.31') - .netmask: IPv4Address('255.255.255.224') - .prefixlen: 27 - - """ - - # the valid octets for host and netmasks. only useful for IPv4. - _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0)) - - def __init__(self, address, strict=False): - """Instantiate a new IPv4 network object. - - Args: - address: A string or integer representing the IP [& network]. - '192.168.1.1/24' - '192.168.1.1/255.255.255.0' - '192.168.1.1/0.0.0.255' - are all functionally the same in IPv4. Similarly, - '192.168.1.1' - '192.168.1.1/255.255.255.255' - '192.168.1.1/32' - are also functionaly equivalent. That is to say, failing to - provide a subnetmask will create an object with a mask of /32. - - If the mask (portion after the / in the argument) is given in - dotted quad form, it is treated as a netmask if it starts with a - non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it - starts with a zero field (e.g. 0.255.255.255 == /8), with the - single exception of an all-zero mask which is treated as a - netmask == /0. If no mask is given, a default of /32 is used. - - Additionally, an integer can be passed, so - IPv4Network('192.168.1.1') == IPv4Network(3232235777). - or, more generally - IPv4Network(int(IPv4Network('192.168.1.1'))) == - IPv4Network('192.168.1.1') - - strict: A boolean. If true, ensure that we have been passed - A true network address, eg, 192.168.1.0/24 and not an - IP address on a network, eg, 192.168.1.1/24. - - Raises: - AddressValueError: If ipaddr isn't a valid IPv4 address. - NetmaskValueError: If the netmask isn't valid for - an IPv4 address. - ValueError: If strict was True and a network address was not - supplied. - - """ - _BaseNet.__init__(self, address) - _BaseV4.__init__(self, address) - - # Constructing from an integer or packed bytes. 
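Per the constructor docstring above, a dotted-quad mask is read as a netmask when its first field is non-zero and as a hostmask when it starts with a zero field, with an all-zero mask kept as /0. A rough standalone sketch of that classification (illustrative name; contiguity and validity checks are omitted):

def prefixlen_from_quad(mask_str):
    # '255.255.255.0' -> 24 (netmask form); '0.0.0.255' -> 24 (hostmask form);
    # '0.0.0.0' stays a /0 netmask, matching the docstring above.
    fields = [int(part) for part in mask_str.split('.')]
    value = 0
    for field in fields:
        value = (value << 8) | field
    if fields[0] == 0 and value != 0:
        value ^= 0xFFFFFFFF               # hostmask: flip into netmask form
    return bin(value).count('1')          # for contiguous masks, ones == prefix length

assert prefixlen_from_quad('255.255.255.0') == 24
assert prefixlen_from_quad('0.0.0.255') == 24
assert prefixlen_from_quad('0.0.0.0') == 0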
- if isinstance(address, (int, long, Bytes)): - self.ip = IPv4Address(address) - self._ip = self.ip._ip - self._prefixlen = self._max_prefixlen - self.netmask = IPv4Address(self._ALL_ONES) - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. - addr = str(address).split('/') - - if len(addr) > 2: - raise AddressValueError(address) - - self._ip = self._ip_int_from_string(addr[0]) - self.ip = IPv4Address(self._ip) - - if len(addr) == 2: - mask = addr[1].split('.') - if len(mask) == 4: - # We have dotted decimal netmask. - if self._is_valid_netmask(addr[1]): - self.netmask = IPv4Address(self._ip_int_from_string( - addr[1])) - elif self._is_hostmask(addr[1]): - self.netmask = IPv4Address( - self._ip_int_from_string(addr[1]) ^ self._ALL_ONES) - else: - raise NetmaskValueError('%s is not a valid netmask' - % addr[1]) - - self._prefixlen = self._prefix_from_ip_int(int(self.netmask)) - else: - # We have a netmask in prefix length form. - if not self._is_valid_netmask(addr[1]): - raise NetmaskValueError(addr[1]) - self._prefixlen = int(addr[1]) - self.netmask = IPv4Address(self._ip_int_from_prefix( - self._prefixlen)) - else: - self._prefixlen = self._max_prefixlen - self.netmask = IPv4Address(self._ip_int_from_prefix( - self._prefixlen)) - if strict: - if self.ip != self.network: - raise ValueError('%s has host bits set' % - self.ip) - if self._prefixlen == (self._max_prefixlen - 1): - self.iterhosts = self.__iter__ - - def _is_hostmask(self, ip_str): - """Test if the IP string is a hostmask (rather than a netmask). - - Args: - ip_str: A string, the potential hostmask. - - Returns: - A boolean, True if the IP string is a hostmask. - - """ - bits = ip_str.split('.') - try: - parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] - except ValueError: - return False - if len(parts) != len(bits): - return False - if parts[0] < parts[-1]: - return True - return False - - def _is_valid_netmask(self, netmask): - """Verify that the netmask is valid. - - Args: - netmask: A string, either a prefix or dotted decimal - netmask. - - Returns: - A boolean, True if the prefix represents a valid IPv4 - netmask. - - """ - mask = netmask.split('.') - if len(mask) == 4: - if [x for x in mask if int(x) not in self._valid_mask_octets]: - return False - if [y for idx, y in enumerate(mask) if idx > 0 and - y > mask[idx - 1]]: - return False - return True - try: - netmask = int(netmask) - except ValueError: - return False - return 0 <= netmask <= self._max_prefixlen - - # backwards compatibility - IsRFC1918 = lambda self: self.is_private - IsMulticast = lambda self: self.is_multicast - IsLoopback = lambda self: self.is_loopback - IsLinkLocal = lambda self: self.is_link_local - - -class _BaseV6(object): - - """Base IPv6 object. - - The following methods are used by IPv6 objects in both single IP - addresses and networks. - - """ - - _ALL_ONES = (2**IPV6LENGTH) - 1 - _HEXTET_COUNT = 8 - _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') - - def __init__(self, address): - self._version = 6 - self._max_prefixlen = IPV6LENGTH - - def _ip_int_from_string(self, ip_str): - """Turn an IPv6 ip_str into an integer. - - Args: - ip_str: A string, the IPv6 ip_str. - - Returns: - A long, the IPv6 ip_str. - - Raises: - AddressValueError: if ip_str isn't a valid IPv6 Address. - - """ - parts = ip_str.split(':') - - # An IPv6 address needs at least 2 colons (3 parts). 
- if len(parts) < 3: - raise AddressValueError(ip_str) - - # If the address has an IPv4-style suffix, convert it to hexadecimal. - if '.' in parts[-1]: - ipv4_int = IPv4Address(parts.pop())._ip - parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) - parts.append('%x' % (ipv4_int & 0xFFFF)) - - # An IPv6 address can't have more than 8 colons (9 parts). - if len(parts) > self._HEXTET_COUNT + 1: - raise AddressValueError(ip_str) - - # Disregarding the endpoints, find '::' with nothing in between. - # This indicates that a run of zeroes has been skipped. - try: - skip_index, = ( - [i for i in xrange(1, len(parts) - 1) if not parts[i]] or - [None]) - except ValueError: - # Can't have more than one '::' - raise AddressValueError(ip_str) - - # parts_hi is the number of parts to copy from above/before the '::' - # parts_lo is the number of parts to copy from below/after the '::' - if skip_index is not None: - # If we found a '::', then check if it also covers the endpoints. - parts_hi = skip_index - parts_lo = len(parts) - skip_index - 1 - if not parts[0]: - parts_hi -= 1 - if parts_hi: - raise AddressValueError(ip_str) # ^: requires ^:: - if not parts[-1]: - parts_lo -= 1 - if parts_lo: - raise AddressValueError(ip_str) # :$ requires ::$ - parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo) - if parts_skipped < 1: - raise AddressValueError(ip_str) - else: - # Otherwise, allocate the entire address to parts_hi. The endpoints - # could still be empty, but _parse_hextet() will check for that. - if len(parts) != self._HEXTET_COUNT: - raise AddressValueError(ip_str) - parts_hi = len(parts) - parts_lo = 0 - parts_skipped = 0 - - try: - # Now, parse the hextets into a 128-bit integer. - ip_int = 0L - for i in xrange(parts_hi): - ip_int <<= 16 - ip_int |= self._parse_hextet(parts[i]) - ip_int <<= 16 * parts_skipped - for i in xrange(-parts_lo, 0): - ip_int <<= 16 - ip_int |= self._parse_hextet(parts[i]) - return ip_int - except ValueError: - raise AddressValueError(ip_str) - - def _parse_hextet(self, hextet_str): - """Convert an IPv6 hextet string into an integer. - - Args: - hextet_str: A string, the number to parse. - - Returns: - The hextet as an integer. - - Raises: - ValueError: if the input isn't strictly a hex number from [0..FFFF]. - - """ - # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not self._HEX_DIGITS.issuperset(hextet_str): - raise ValueError - hextet_int = int(hextet_str, 16) - if hextet_int > 0xFFFF: - raise ValueError - return hextet_int - - def _compress_hextets(self, hextets): - """Compresses a list of hextets. - - Compresses a list of strings, replacing the longest continuous - sequence of "0" in the list with "" and adding empty strings at - the beginning or at the end of the string such that subsequently - calling ":".join(hextets) will produce the compressed version of - the IPv6 address. - - Args: - hextets: A list of strings, the hextets to compress. - - Returns: - A list of strings. - - """ - best_doublecolon_start = -1 - best_doublecolon_len = 0 - doublecolon_start = -1 - doublecolon_len = 0 - for index in range(len(hextets)): - if hextets[index] == '0': - doublecolon_len += 1 - if doublecolon_start == -1: - # Start of a sequence of zeros. - doublecolon_start = index - if doublecolon_len > best_doublecolon_len: - # This is the longest sequence of zeros so far. 
- best_doublecolon_len = doublecolon_len - best_doublecolon_start = doublecolon_start - else: - doublecolon_len = 0 - doublecolon_start = -1 - - if best_doublecolon_len > 1: - best_doublecolon_end = (best_doublecolon_start + - best_doublecolon_len) - # For zeros at the end of the address. - if best_doublecolon_end == len(hextets): - hextets += [''] - hextets[best_doublecolon_start:best_doublecolon_end] = [''] - # For zeros at the beginning of the address. - if best_doublecolon_start == 0: - hextets = [''] + hextets - - return hextets - - def _string_from_ip_int(self, ip_int=None): - """Turns a 128-bit integer into hexadecimal notation. - - Args: - ip_int: An integer, the IP address. - - Returns: - A string, the hexadecimal representation of the address. - - Raises: - ValueError: The address is bigger than 128 bits of all ones. - - """ - if not ip_int and ip_int != 0: - ip_int = int(self._ip) - - if ip_int > self._ALL_ONES: - raise ValueError('IPv6 address is too large') - - hex_str = '%032x' % ip_int - hextets = [] - for x in range(0, 32, 4): - hextets.append('%x' % int(hex_str[x:x+4], 16)) - - hextets = self._compress_hextets(hextets) - return ':'.join(hextets) - - def _explode_shorthand_ip_string(self): - """Expand a shortened IPv6 address. - - Args: - ip_str: A string, the IPv6 address. - - Returns: - A string, the expanded IPv6 address. - - """ - if isinstance(self, _BaseNet): - ip_str = str(self.ip) - else: - ip_str = str(self) - - ip_int = self._ip_int_from_string(ip_str) - parts = [] - for i in xrange(self._HEXTET_COUNT): - parts.append('%04x' % (ip_int & 0xFFFF)) - ip_int >>= 16 - parts.reverse() - if isinstance(self, _BaseNet): - return '%s/%d' % (':'.join(parts), self.prefixlen) - return ':'.join(parts) - - @property - def max_prefixlen(self): - return self._max_prefixlen - - @property - def packed(self): - """The binary representation of this address.""" - return v6_int_to_packed(self._ip) - - @property - def version(self): - return self._version - - @property - def is_multicast(self): - """Test if the address is reserved for multicast use. - - Returns: - A boolean, True if the address is a multicast address. - See RFC 2373 2.7 for details. - - """ - return self in IPv6Network('ff00::/8') - - @property - def is_reserved(self): - """Test if the address is otherwise IETF reserved. - - Returns: - A boolean, True if the address is within one of the - reserved IPv6 Network ranges. - - """ - return (self in IPv6Network('::/8') or - self in IPv6Network('100::/8') or - self in IPv6Network('200::/7') or - self in IPv6Network('400::/6') or - self in IPv6Network('800::/5') or - self in IPv6Network('1000::/4') or - self in IPv6Network('4000::/3') or - self in IPv6Network('6000::/3') or - self in IPv6Network('8000::/3') or - self in IPv6Network('A000::/3') or - self in IPv6Network('C000::/3') or - self in IPv6Network('E000::/4') or - self in IPv6Network('F000::/5') or - self in IPv6Network('F800::/6') or - self in IPv6Network('FE00::/9')) - - @property - def is_unspecified(self): - """Test if the address is unspecified. - - Returns: - A boolean, True if this is the unspecified address as defined in - RFC 2373 2.5.2. - - """ - return self._ip == 0 and getattr(self, '_prefixlen', 128) == 128 - - @property - def is_loopback(self): - """Test if the address is a loopback address. - - Returns: - A boolean, True if the address is a loopback address as defined in - RFC 2373 2.5.3. 
- - """ - return self._ip == 1 and getattr(self, '_prefixlen', 128) == 128 - - @property - def is_link_local(self): - """Test if the address is reserved for link-local. - - Returns: - A boolean, True if the address is reserved per RFC 4291. - - """ - return self in IPv6Network('fe80::/10') - - @property - def is_site_local(self): - """Test if the address is reserved for site-local. - - Note that the site-local address space has been deprecated by RFC 3879. - Use is_private to test if this address is in the space of unique local - addresses as defined by RFC 4193. - - Returns: - A boolean, True if the address is reserved per RFC 3513 2.5.6. - - """ - return self in IPv6Network('fec0::/10') - - @property - def is_private(self): - """Test if this address is allocated for private networks. - - Returns: - A boolean, True if the address is reserved per RFC 4193. - - """ - return self in IPv6Network('fc00::/7') - - @property - def ipv4_mapped(self): - """Return the IPv4 mapped address. - - Returns: - If the IPv6 address is a v4 mapped address, return the - IPv4 mapped address. Return None otherwise. - - """ - if (self._ip >> 32) != 0xFFFF: - return None - return IPv4Address(self._ip & 0xFFFFFFFF) - - @property - def teredo(self): - """Tuple of embedded teredo IPs. - - Returns: - Tuple of the (server, client) IPs or None if the address - doesn't appear to be a teredo address (doesn't start with - 2001::/32) - - """ - if (self._ip >> 96) != 0x20010000: - return None - return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), - IPv4Address(~self._ip & 0xFFFFFFFF)) - - @property - def sixtofour(self): - """Return the IPv4 6to4 embedded address. - - Returns: - The IPv4 6to4-embedded address if present or None if the - address doesn't appear to contain a 6to4 embedded address. - - """ - if (self._ip >> 112) != 0x2002: - return None - return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) - - -class IPv6Address(_BaseV6, _BaseIP): - - """Represent and manipulate single IPv6 Addresses. - """ - - def __init__(self, address): - """Instantiate a new IPv6 address object. - - Args: - address: A string or integer representing the IP - - Additionally, an integer can be passed, so - IPv6Address('2001:4860::') == - IPv6Address(42541956101370907050197289607612071936L). - or, more generally - IPv6Address(IPv6Address('2001:4860::')._ip) == - IPv6Address('2001:4860::') - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - - """ - _BaseV6.__init__(self, address) - - # Efficient constructor from integer. - if isinstance(address, (int, long)): - self._ip = address - if address < 0 or address > self._ALL_ONES: - raise AddressValueError(address) - return - - # Constructing from a packed address - if isinstance(address, Bytes): - try: - hi, lo = struct.unpack('!QQ', address) - except struct.error: - raise AddressValueError(address) # Wrong length. - self._ip = (hi << 64) | lo - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP string. - addr_str = str(address) - if not addr_str: - raise AddressValueError('') - - self._ip = self._ip_int_from_string(addr_str) - - -class IPv6Network(_BaseV6, _BaseNet): - - """This class represents and manipulates 128-bit IPv6 networks. 
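The ipv4_mapped and sixtofour properties above recover an embedded IPv4 address from fixed bit positions of the 128-bit value. A standalone sketch of those two cases with plain integers (illustrative name):

def embedded_ipv4(ipv6_int):
    # ::ffff:a.b.c.d -> the IPv4 address is the low 32 bits.
    if (ipv6_int >> 32) == 0xFFFF:
        return ipv6_int & 0xFFFFFFFF
    # 2002::/16 (6to4) -> the IPv4 address sits in bits 80..111.
    if (ipv6_int >> 112) == 0x2002:
        return (ipv6_int >> 80) & 0xFFFFFFFF
    return None

assert embedded_ipv4((0xFFFF << 32) | 0xC0000201) == 0xC0000201           # ::ffff:192.0.2.1
assert embedded_ipv4((0x2002 << 112) | (0xC0000201 << 80)) == 0xC0000201  # 2002:c000:201::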
- - Attributes: [examples for IPv6('2001:658:22A:CAFE:200::1/64')] - .ip: IPv6Address('2001:658:22a:cafe:200::1') - .network: IPv6Address('2001:658:22a:cafe::') - .hostmask: IPv6Address('::ffff:ffff:ffff:ffff') - .broadcast: IPv6Address('2001:658:22a:cafe:ffff:ffff:ffff:ffff') - .netmask: IPv6Address('ffff:ffff:ffff:ffff::') - .prefixlen: 64 - - """ - - - def __init__(self, address, strict=False): - """Instantiate a new IPv6 Network object. - - Args: - address: A string or integer representing the IPv6 network or the IP - and prefix/netmask. - '2001:4860::/128' - '2001:4860:0000:0000:0000:0000:0000:0000/128' - '2001:4860::' - are all functionally the same in IPv6. That is to say, - failing to provide a subnetmask will create an object with - a mask of /128. - - Additionally, an integer can be passed, so - IPv6Network('2001:4860::') == - IPv6Network(42541956101370907050197289607612071936L). - or, more generally - IPv6Network(IPv6Network('2001:4860::')._ip) == - IPv6Network('2001:4860::') - - strict: A boolean. If true, ensure that we have been passed - A true network address, eg, 192.168.1.0/24 and not an - IP address on a network, eg, 192.168.1.1/24. - - Raises: - AddressValueError: If address isn't a valid IPv6 address. - NetmaskValueError: If the netmask isn't valid for - an IPv6 address. - ValueError: If strict was True and a network address was not - supplied. - - """ - _BaseNet.__init__(self, address) - _BaseV6.__init__(self, address) - - # Constructing from an integer or packed bytes. - if isinstance(address, (int, long, Bytes)): - self.ip = IPv6Address(address) - self._ip = self.ip._ip - self._prefixlen = self._max_prefixlen - self.netmask = IPv6Address(self._ALL_ONES) - return - - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. - addr = str(address).split('/') - - if len(addr) > 2: - raise AddressValueError(address) - - self._ip = self._ip_int_from_string(addr[0]) - self.ip = IPv6Address(self._ip) - - if len(addr) == 2: - if self._is_valid_netmask(addr[1]): - self._prefixlen = int(addr[1]) - else: - raise NetmaskValueError(addr[1]) - else: - self._prefixlen = self._max_prefixlen - - self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen)) - - if strict: - if self.ip != self.network: - raise ValueError('%s has host bits set' % - self.ip) - if self._prefixlen == (self._max_prefixlen - 1): - self.iterhosts = self.__iter__ - - def _is_valid_netmask(self, prefixlen): - """Verify that the netmask/prefixlen is valid. - - Args: - prefixlen: A string, the netmask in prefix length format. - - Returns: - A boolean, True if the prefix represents a valid IPv6 - netmask. - - """ - try: - prefixlen = int(prefixlen) - except ValueError: - return False - return 0 <= prefixlen <= self._max_prefixlen - - @property - def with_netmask(self): - return self.with_prefixlen diff --git a/plugins/Sidebar/maxminddb/reader.py b/plugins/Sidebar/maxminddb/reader.py deleted file mode 100644 index 5ecfbdf2..00000000 --- a/plugins/Sidebar/maxminddb/reader.py +++ /dev/null @@ -1,221 +0,0 @@ -""" -maxminddb.reader -~~~~~~~~~~~~~~~~ - -This module contains the pure Python database reader and related classes. 
- -""" -from __future__ import unicode_literals - -try: - import mmap -except ImportError: - # pylint: disable=invalid-name - mmap = None - -import struct - -from maxminddb.compat import byte_from_int, int_from_byte, ipaddress -from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY -from maxminddb.decoder import Decoder -from maxminddb.errors import InvalidDatabaseError -from maxminddb.file import FileBuffer - - -class Reader(object): - - """ - Instances of this class provide a reader for the MaxMind DB format. IP - addresses can be looked up using the ``get`` method. - """ - - _DATA_SECTION_SEPARATOR_SIZE = 16 - _METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com" - - _ipv4_start = None - - def __init__(self, database, mode=MODE_AUTO): - """Reader for the MaxMind DB file format - - Arguments: - database -- A path to a valid MaxMind DB file such as a GeoIP2 - database file. - mode -- mode to open the database with. Valid mode are: - * MODE_MMAP - read from memory map. - * MODE_FILE - read database as standard file. - * MODE_MEMORY - load database into memory. - * MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default. - """ - if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP: - with open(database, 'rb') as db_file: - self._buffer = mmap.mmap( - db_file.fileno(), 0, access=mmap.ACCESS_READ) - self._buffer_size = self._buffer.size() - elif mode in (MODE_AUTO, MODE_FILE): - self._buffer = FileBuffer(database) - self._buffer_size = self._buffer.size() - elif mode == MODE_MEMORY: - with open(database, 'rb') as db_file: - self._buffer = db_file.read() - self._buffer_size = len(self._buffer) - else: - raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, ' - ' MODE_FILE, and MODE_MEMORY are support by the pure Python ' - 'Reader'.format(mode)) - - metadata_start = self._buffer.rfind(self._METADATA_START_MARKER, - max(0, self._buffer_size - - 128 * 1024)) - - if metadata_start == -1: - self.close() - raise InvalidDatabaseError('Error opening database file ({0}). ' - 'Is this a valid MaxMind DB file?' - ''.format(database)) - - metadata_start += len(self._METADATA_START_MARKER) - metadata_decoder = Decoder(self._buffer, metadata_start) - (metadata, _) = metadata_decoder.decode(metadata_start) - self._metadata = Metadata( - **metadata) # pylint: disable=bad-option-value - - self._decoder = Decoder(self._buffer, self._metadata.search_tree_size - + self._DATA_SECTION_SEPARATOR_SIZE) - - def metadata(self): - """Return the metadata associated with the MaxMind DB file""" - return self._metadata - - def get(self, ip_address): - """Return the record for the ip_address in the MaxMind DB - - - Arguments: - ip_address -- an IP address in the standard string notation - """ - address = ipaddress.ip_address(ip_address) - - if address.version == 6 and self._metadata.ip_version == 4: - raise ValueError('Error looking up {0}. 
You attempted to look up ' - 'an IPv6 address in an IPv4-only database.'.format( - ip_address)) - pointer = self._find_address_in_tree(address) - - return self._resolve_data_pointer(pointer) if pointer else None - - def _find_address_in_tree(self, ip_address): - packed = ip_address.packed - - bit_count = len(packed) * 8 - node = self._start_node(bit_count) - - for i in range(bit_count): - if node >= self._metadata.node_count: - break - bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8)) - node = self._read_node(node, bit) - if node == self._metadata.node_count: - # Record is empty - return 0 - elif node > self._metadata.node_count: - return node - - raise InvalidDatabaseError('Invalid node in search tree') - - def _start_node(self, length): - if self._metadata.ip_version != 6 or length == 128: - return 0 - - # We are looking up an IPv4 address in an IPv6 tree. Skip over the - # first 96 nodes. - if self._ipv4_start: - return self._ipv4_start - - node = 0 - for _ in range(96): - if node >= self._metadata.node_count: - break - node = self._read_node(node, 0) - self._ipv4_start = node - return node - - def _read_node(self, node_number, index): - base_offset = node_number * self._metadata.node_byte_size - - record_size = self._metadata.record_size - if record_size == 24: - offset = base_offset + index * 3 - node_bytes = b'\x00' + self._buffer[offset:offset + 3] - elif record_size == 28: - (middle,) = struct.unpack( - b'!B', self._buffer[base_offset + 3:base_offset + 4]) - if index: - middle &= 0x0F - else: - middle = (0xF0 & middle) >> 4 - offset = base_offset + index * 4 - node_bytes = byte_from_int( - middle) + self._buffer[offset:offset + 3] - elif record_size == 32: - offset = base_offset + index * 4 - node_bytes = self._buffer[offset:offset + 4] - else: - raise InvalidDatabaseError( - 'Unknown record size: {0}'.format(record_size)) - return struct.unpack(b'!I', node_bytes)[0] - - def _resolve_data_pointer(self, pointer): - resolved = pointer - self._metadata.node_count + \ - self._metadata.search_tree_size - - if resolved > self._buffer_size: - raise InvalidDatabaseError( - "The MaxMind DB file's search tree is corrupt") - - (data, _) = self._decoder.decode(resolved) - return data - - def close(self): - """Closes the MaxMind DB file and returns the resources to the system""" - # pylint: disable=unidiomatic-typecheck - if type(self._buffer) not in (str, bytes): - self._buffer.close() - - -class Metadata(object): - - """Metadata for the MaxMind DB reader""" - - # pylint: disable=too-many-instance-attributes - def __init__(self, **kwargs): - """Creates new Metadata object. 
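_find_address_in_tree above walks one bit of the packed address per tree level, choosing the left or right record of each node, while _read_node handles the on-disk 24/28/32-bit record encodings. A rough in-memory sketch of the bit walk alone, over a plain list of (left, right) pairs rather than the binary buffer (illustrative structure, not the MaxMind DB file format):

def walk_tree(nodes, packed, node_count):
    # nodes[i] is the (left, right) record pair for node i.
    node = 0
    for i in range(len(packed) * 8):
        if node >= node_count:
            break
        bit = 1 & (packed[i >> 3] >> (7 - (i % 8)))   # most-significant bit first
        node = nodes[node][bit]
    if node == node_count:
        return None                # empty record: no data for this address
    if node > node_count:
        return node                # value past the node area points into the data section
    raise ValueError('invalid node in search tree')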
kwargs are key/value pairs from spec""" - # Although I could just update __dict__, that is less obvious and it - # doesn't work well with static analysis tools and some IDEs - self.node_count = kwargs['node_count'] - self.record_size = kwargs['record_size'] - self.ip_version = kwargs['ip_version'] - self.database_type = kwargs['database_type'] - self.languages = kwargs['languages'] - self.binary_format_major_version = kwargs[ - 'binary_format_major_version'] - self.binary_format_minor_version = kwargs[ - 'binary_format_minor_version'] - self.build_epoch = kwargs['build_epoch'] - self.description = kwargs['description'] - - @property - def node_byte_size(self): - """The size of a node in bytes""" - return self.record_size // 4 - - @property - def search_tree_size(self): - """The size of the search tree""" - return self.node_count * self.node_byte_size - - def __repr__(self): - args = ', '.join('%s=%r' % x for x in self.__dict__.items()) - return '{module}.{class_name}({data})'.format( - module=self.__module__, - class_name=self.__class__.__name__, - data=args) diff --git a/plugins/Sidebar/media-globe/Detector.js b/plugins/Sidebar/media-globe/Detector.js deleted file mode 100644 index 1c074b83..00000000 --- a/plugins/Sidebar/media-globe/Detector.js +++ /dev/null @@ -1,60 +0,0 @@ -/** - * @author alteredq / http://alteredqualia.com/ - * @author mr.doob / http://mrdoob.com/ - */ - -Detector = { - - canvas : !! window.CanvasRenderingContext2D, - webgl : ( function () { try { return !! window.WebGLRenderingContext && !! document.createElement( 'canvas' ).getContext( 'experimental-webgl' ); } catch( e ) { return false; } } )(), - workers : !! window.Worker, - fileapi : window.File && window.FileReader && window.FileList && window.Blob, - - getWebGLErrorMessage : function () { - - var domElement = document.createElement( 'div' ); - - domElement.style.fontFamily = 'monospace'; - domElement.style.fontSize = '13px'; - domElement.style.textAlign = 'center'; - domElement.style.background = '#eee'; - domElement.style.color = '#000'; - domElement.style.padding = '1em'; - domElement.style.width = '475px'; - domElement.style.margin = '5em auto 0'; - - if ( ! this.webgl ) { - - domElement.innerHTML = window.WebGLRenderingContext ? [ - 'Sorry, your graphics card doesn\'t support WebGL' - ].join( '\n' ) : [ - 'Sorry, your browser doesn\'t support WebGL
    ', - 'Please try with', - 'Chrome, ', - 'Firefox 4 or', - 'Webkit Nightly (Mac)' - ].join( '\n' ); - - } - - return domElement; - - }, - - addGetWebGLMessage : function ( parameters ) { - - var parent, id, domElement; - - parameters = parameters || {}; - - parent = parameters.parent !== undefined ? parameters.parent : document.body; - id = parameters.id !== undefined ? parameters.id : 'oldie'; - - domElement = Detector.getWebGLErrorMessage(); - domElement.id = id; - - parent.appendChild( domElement ); - - } - -}; diff --git a/plugins/Sidebar/media-globe/Tween.js b/plugins/Sidebar/media-globe/Tween.js deleted file mode 100644 index bdf141ad..00000000 --- a/plugins/Sidebar/media-globe/Tween.js +++ /dev/null @@ -1,12 +0,0 @@ -// Tween.js - http://github.com/sole/tween.js -var TWEEN=TWEEN||function(){var a,e,c,d,f=[];return{start:function(g){c=setInterval(this.update,1E3/(g||60))},stop:function(){clearInterval(c)},add:function(g){f.push(g)},remove:function(g){a=f.indexOf(g);a!==-1&&f.splice(a,1)},update:function(){a=0;e=f.length;for(d=(new Date).getTime();a1?1:b;i=n(b);for(h in c)a[h]=e[h]+c[h]*i;l!==null&&l.call(a,i);if(b==1){m!==null&&m.call(a);k!==null&&k.start();return false}return true}};TWEEN.Easing={Linear:{},Quadratic:{},Cubic:{},Quartic:{},Quintic:{},Sinusoidal:{},Exponential:{},Circular:{},Elastic:{},Back:{},Bounce:{}};TWEEN.Easing.Linear.EaseNone=function(a){return a}; -TWEEN.Easing.Quadratic.EaseIn=function(a){return a*a};TWEEN.Easing.Quadratic.EaseOut=function(a){return-a*(a-2)};TWEEN.Easing.Quadratic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a;return-0.5*(--a*(a-2)-1)};TWEEN.Easing.Cubic.EaseIn=function(a){return a*a*a};TWEEN.Easing.Cubic.EaseOut=function(a){return--a*a*a+1};TWEEN.Easing.Cubic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a;return 0.5*((a-=2)*a*a+2)};TWEEN.Easing.Quartic.EaseIn=function(a){return a*a*a*a}; -TWEEN.Easing.Quartic.EaseOut=function(a){return-(--a*a*a*a-1)};TWEEN.Easing.Quartic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a;return-0.5*((a-=2)*a*a*a-2)};TWEEN.Easing.Quintic.EaseIn=function(a){return a*a*a*a*a};TWEEN.Easing.Quintic.EaseOut=function(a){return(a-=1)*a*a*a*a+1};TWEEN.Easing.Quintic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a*a;return 0.5*((a-=2)*a*a*a*a+2)};TWEEN.Easing.Sinusoidal.EaseIn=function(a){return-Math.cos(a*Math.PI/2)+1}; -TWEEN.Easing.Sinusoidal.EaseOut=function(a){return Math.sin(a*Math.PI/2)};TWEEN.Easing.Sinusoidal.EaseInOut=function(a){return-0.5*(Math.cos(Math.PI*a)-1)};TWEEN.Easing.Exponential.EaseIn=function(a){return a==0?0:Math.pow(2,10*(a-1))};TWEEN.Easing.Exponential.EaseOut=function(a){return a==1?1:-Math.pow(2,-10*a)+1};TWEEN.Easing.Exponential.EaseInOut=function(a){if(a==0)return 0;if(a==1)return 1;if((a*=2)<1)return 0.5*Math.pow(2,10*(a-1));return 0.5*(-Math.pow(2,-10*(a-1))+2)}; -TWEEN.Easing.Circular.EaseIn=function(a){return-(Math.sqrt(1-a*a)-1)};TWEEN.Easing.Circular.EaseOut=function(a){return Math.sqrt(1- --a*a)};TWEEN.Easing.Circular.EaseInOut=function(a){if((a/=0.5)<1)return-0.5*(Math.sqrt(1-a*a)-1);return 0.5*(Math.sqrt(1-(a-=2)*a)+1)};TWEEN.Easing.Elastic.EaseIn=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return-(c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d))}; -TWEEN.Easing.Elastic.EaseOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return c*Math.pow(2,-10*a)*Math.sin((a-e)*2*Math.PI/d)+1}; 
-TWEEN.Easing.Elastic.EaseInOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);if((a*=2)<1)return-0.5*c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d);return c*Math.pow(2,-10*(a-=1))*Math.sin((a-e)*2*Math.PI/d)*0.5+1};TWEEN.Easing.Back.EaseIn=function(a){return a*a*(2.70158*a-1.70158)};TWEEN.Easing.Back.EaseOut=function(a){return(a-=1)*a*(2.70158*a+1.70158)+1}; -TWEEN.Easing.Back.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*(3.5949095*a-2.5949095);return 0.5*((a-=2)*a*(3.5949095*a+2.5949095)+2)};TWEEN.Easing.Bounce.EaseIn=function(a){return 1-TWEEN.Easing.Bounce.EaseOut(1-a)};TWEEN.Easing.Bounce.EaseOut=function(a){return(a/=1)<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+0.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+0.9375:7.5625*(a-=2.625/2.75)*a+0.984375}; -TWEEN.Easing.Bounce.EaseInOut=function(a){if(a<0.5)return TWEEN.Easing.Bounce.EaseIn(a*2)*0.5;return TWEEN.Easing.Bounce.EaseOut(a*2-1)*0.5+0.5}; diff --git a/plugins/Sidebar/media-globe/all.js b/plugins/Sidebar/media-globe/all.js deleted file mode 100644 index 5ddc0313..00000000 --- a/plugins/Sidebar/media-globe/all.js +++ /dev/null @@ -1,1345 +0,0 @@ - - -/* ---- plugins/Sidebar/media-globe/Detector.js ---- */ - - -/** - * @author alteredq / http://alteredqualia.com/ - * @author mr.doob / http://mrdoob.com/ - */ - -Detector = { - - canvas : !! window.CanvasRenderingContext2D, - webgl : ( function () { try { return !! window.WebGLRenderingContext && !! document.createElement( 'canvas' ).getContext( 'experimental-webgl' ); } catch( e ) { return false; } } )(), - workers : !! window.Worker, - fileapi : window.File && window.FileReader && window.FileList && window.Blob, - - getWebGLErrorMessage : function () { - - var domElement = document.createElement( 'div' ); - - domElement.style.fontFamily = 'monospace'; - domElement.style.fontSize = '13px'; - domElement.style.textAlign = 'center'; - domElement.style.background = '#eee'; - domElement.style.color = '#000'; - domElement.style.padding = '1em'; - domElement.style.width = '475px'; - domElement.style.margin = '5em auto 0'; - - if ( ! this.webgl ) { - - domElement.innerHTML = window.WebGLRenderingContext ? [ - 'Sorry, your graphics card doesn\'t support WebGL' - ].join( '\n' ) : [ - 'Sorry, your browser doesn\'t support WebGL
    ', - 'Please try with', - 'Chrome, ', - 'Firefox 4 or', - 'Webkit Nightly (Mac)' - ].join( '\n' ); - - } - - return domElement; - - }, - - addGetWebGLMessage : function ( parameters ) { - - var parent, id, domElement; - - parameters = parameters || {}; - - parent = parameters.parent !== undefined ? parameters.parent : document.body; - id = parameters.id !== undefined ? parameters.id : 'oldie'; - - domElement = Detector.getWebGLErrorMessage(); - domElement.id = id; - - parent.appendChild( domElement ); - - } - -}; - - - -/* ---- plugins/Sidebar/media-globe/Tween.js ---- */ - - -// Tween.js - http://github.com/sole/tween.js -var TWEEN=TWEEN||function(){var a,e,c,d,f=[];return{start:function(g){c=setInterval(this.update,1E3/(g||60))},stop:function(){clearInterval(c)},add:function(g){f.push(g)},remove:function(g){a=f.indexOf(g);a!==-1&&f.splice(a,1)},update:function(){a=0;e=f.length;for(d=(new Date).getTime();a1?1:b;i=n(b);for(h in c)a[h]=e[h]+c[h]*i;l!==null&&l.call(a,i);if(b==1){m!==null&&m.call(a);k!==null&&k.start();return false}return true}};TWEEN.Easing={Linear:{},Quadratic:{},Cubic:{},Quartic:{},Quintic:{},Sinusoidal:{},Exponential:{},Circular:{},Elastic:{},Back:{},Bounce:{}};TWEEN.Easing.Linear.EaseNone=function(a){return a}; -TWEEN.Easing.Quadratic.EaseIn=function(a){return a*a};TWEEN.Easing.Quadratic.EaseOut=function(a){return-a*(a-2)};TWEEN.Easing.Quadratic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a;return-0.5*(--a*(a-2)-1)};TWEEN.Easing.Cubic.EaseIn=function(a){return a*a*a};TWEEN.Easing.Cubic.EaseOut=function(a){return--a*a*a+1};TWEEN.Easing.Cubic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a;return 0.5*((a-=2)*a*a+2)};TWEEN.Easing.Quartic.EaseIn=function(a){return a*a*a*a}; -TWEEN.Easing.Quartic.EaseOut=function(a){return-(--a*a*a*a-1)};TWEEN.Easing.Quartic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a;return-0.5*((a-=2)*a*a*a-2)};TWEEN.Easing.Quintic.EaseIn=function(a){return a*a*a*a*a};TWEEN.Easing.Quintic.EaseOut=function(a){return(a-=1)*a*a*a*a+1};TWEEN.Easing.Quintic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a*a;return 0.5*((a-=2)*a*a*a*a+2)};TWEEN.Easing.Sinusoidal.EaseIn=function(a){return-Math.cos(a*Math.PI/2)+1}; -TWEEN.Easing.Sinusoidal.EaseOut=function(a){return Math.sin(a*Math.PI/2)};TWEEN.Easing.Sinusoidal.EaseInOut=function(a){return-0.5*(Math.cos(Math.PI*a)-1)};TWEEN.Easing.Exponential.EaseIn=function(a){return a==0?0:Math.pow(2,10*(a-1))};TWEEN.Easing.Exponential.EaseOut=function(a){return a==1?1:-Math.pow(2,-10*a)+1};TWEEN.Easing.Exponential.EaseInOut=function(a){if(a==0)return 0;if(a==1)return 1;if((a*=2)<1)return 0.5*Math.pow(2,10*(a-1));return 0.5*(-Math.pow(2,-10*(a-1))+2)}; -TWEEN.Easing.Circular.EaseIn=function(a){return-(Math.sqrt(1-a*a)-1)};TWEEN.Easing.Circular.EaseOut=function(a){return Math.sqrt(1- --a*a)};TWEEN.Easing.Circular.EaseInOut=function(a){if((a/=0.5)<1)return-0.5*(Math.sqrt(1-a*a)-1);return 0.5*(Math.sqrt(1-(a-=2)*a)+1)};TWEEN.Easing.Elastic.EaseIn=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return-(c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d))}; -TWEEN.Easing.Elastic.EaseOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return c*Math.pow(2,-10*a)*Math.sin((a-e)*2*Math.PI/d)+1}; -TWEEN.Easing.Elastic.EaseInOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else 
e=d/(2*Math.PI)*Math.asin(1/c);if((a*=2)<1)return-0.5*c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d);return c*Math.pow(2,-10*(a-=1))*Math.sin((a-e)*2*Math.PI/d)*0.5+1};TWEEN.Easing.Back.EaseIn=function(a){return a*a*(2.70158*a-1.70158)};TWEEN.Easing.Back.EaseOut=function(a){return(a-=1)*a*(2.70158*a+1.70158)+1}; -TWEEN.Easing.Back.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*(3.5949095*a-2.5949095);return 0.5*((a-=2)*a*(3.5949095*a+2.5949095)+2)};TWEEN.Easing.Bounce.EaseIn=function(a){return 1-TWEEN.Easing.Bounce.EaseOut(1-a)};TWEEN.Easing.Bounce.EaseOut=function(a){return(a/=1)<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+0.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+0.9375:7.5625*(a-=2.625/2.75)*a+0.984375}; -TWEEN.Easing.Bounce.EaseInOut=function(a){if(a<0.5)return TWEEN.Easing.Bounce.EaseIn(a*2)*0.5;return TWEEN.Easing.Bounce.EaseOut(a*2-1)*0.5+0.5}; - - - -/* ---- plugins/Sidebar/media-globe/globe.js ---- */ - - -/** - * dat.globe Javascript WebGL Globe Toolkit - * http://dataarts.github.com/dat.globe - * - * Copyright 2011 Data Arts Team, Google Creative Lab - * - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ - -var DAT = DAT || {}; - -DAT.Globe = function(container, opts) { - opts = opts || {}; - - var colorFn = opts.colorFn || function(x) { - var c = new THREE.Color(); - c.setHSL( ( 0.5 - (x * 2) ), Math.max(0.8, 1.0 - (x * 3)), 0.5 ); - return c; - }; - var imgDir = opts.imgDir || '/globe/'; - - var Shaders = { - 'earth' : { - uniforms: { - 'texture': { type: 't', value: null } - }, - vertexShader: [ - 'varying vec3 vNormal;', - 'varying vec2 vUv;', - 'void main() {', - 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', - 'vNormal = normalize( normalMatrix * normal );', - 'vUv = uv;', - '}' - ].join('\n'), - fragmentShader: [ - 'uniform sampler2D texture;', - 'varying vec3 vNormal;', - 'varying vec2 vUv;', - 'void main() {', - 'vec3 diffuse = texture2D( texture, vUv ).xyz;', - 'float intensity = 1.05 - dot( vNormal, vec3( 0.0, 0.0, 1.0 ) );', - 'vec3 atmosphere = vec3( 1.0, 1.0, 1.0 ) * pow( intensity, 3.0 );', - 'gl_FragColor = vec4( diffuse + atmosphere, 1.0 );', - '}' - ].join('\n') - }, - 'atmosphere' : { - uniforms: {}, - vertexShader: [ - 'varying vec3 vNormal;', - 'void main() {', - 'vNormal = normalize( normalMatrix * normal );', - 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', - '}' - ].join('\n'), - fragmentShader: [ - 'varying vec3 vNormal;', - 'void main() {', - 'float intensity = pow( 0.8 - dot( vNormal, vec3( 0, 0, 1.0 ) ), 12.0 );', - 'gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0 ) * intensity;', - '}' - ].join('\n') - } - }; - - var camera, scene, renderer, w, h; - var mesh, atmosphere, point, running; - - var overRenderer; - var running = true; - - var curZoomSpeed = 0; - var zoomSpeed = 50; - - var mouse = { x: 0, y: 0 }, mouseOnDown = { x: 0, y: 0 }; - var rotation = { x: 0, y: 0 }, - target = { x: Math.PI*3/2, y: Math.PI / 6.0 }, - targetOnDown = { x: 0, y: 0 }; - - var distance = 100000, distanceTarget = 100000; - var padding = 10; - var PI_HALF = Math.PI / 2; - - function init() { - - container.style.color = '#fff'; - container.style.font = '13px/20px Arial, sans-serif'; - - var shader, uniforms, material; - w = container.offsetWidth || window.innerWidth; - h = container.offsetHeight || window.innerHeight; - - 
camera = new THREE.PerspectiveCamera(30, w / h, 1, 10000); - camera.position.z = distance; - - scene = new THREE.Scene(); - - var geometry = new THREE.SphereGeometry(200, 40, 30); - - shader = Shaders['earth']; - uniforms = THREE.UniformsUtils.clone(shader.uniforms); - - uniforms['texture'].value = THREE.ImageUtils.loadTexture(imgDir+'world.jpg'); - - material = new THREE.ShaderMaterial({ - - uniforms: uniforms, - vertexShader: shader.vertexShader, - fragmentShader: shader.fragmentShader - - }); - - mesh = new THREE.Mesh(geometry, material); - mesh.rotation.y = Math.PI; - scene.add(mesh); - - shader = Shaders['atmosphere']; - uniforms = THREE.UniformsUtils.clone(shader.uniforms); - - material = new THREE.ShaderMaterial({ - - uniforms: uniforms, - vertexShader: shader.vertexShader, - fragmentShader: shader.fragmentShader, - side: THREE.BackSide, - blending: THREE.AdditiveBlending, - transparent: true - - }); - - mesh = new THREE.Mesh(geometry, material); - mesh.scale.set( 1.1, 1.1, 1.1 ); - scene.add(mesh); - - geometry = new THREE.BoxGeometry(2.75, 2.75, 1); - geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0,0,-0.5)); - - point = new THREE.Mesh(geometry); - - renderer = new THREE.WebGLRenderer({antialias: true}); - renderer.setSize(w, h); - renderer.setClearColor( 0x212121, 1 ); - - renderer.domElement.style.position = 'relative'; - - container.appendChild(renderer.domElement); - - container.addEventListener('mousedown', onMouseDown, false); - - if ('onwheel' in document) { - container.addEventListener('wheel', onMouseWheel, false); - } else { - container.addEventListener('mousewheel', onMouseWheel, false); - } - - document.addEventListener('keydown', onDocumentKeyDown, false); - - window.addEventListener('resize', onWindowResize, false); - - container.addEventListener('mouseover', function() { - overRenderer = true; - }, false); - - container.addEventListener('mouseout', function() { - overRenderer = false; - }, false); - } - - function addData(data, opts) { - var lat, lng, size, color, i, step, colorFnWrapper; - - opts.animated = opts.animated || false; - this.is_animated = opts.animated; - opts.format = opts.format || 'magnitude'; // other option is 'legend' - if (opts.format === 'magnitude') { - step = 3; - colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } - } else if (opts.format === 'legend') { - step = 4; - colorFnWrapper = function(data, i) { return colorFn(data[i+3]); } - } else if (opts.format === 'peer') { - colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } - } else { - throw('error: format not supported: '+opts.format); - } - - if (opts.animated) { - if (this._baseGeometry === undefined) { - this._baseGeometry = new THREE.Geometry(); - for (i = 0; i < data.length; i += step) { - lat = data[i]; - lng = data[i + 1]; -// size = data[i + 2]; - color = colorFnWrapper(data,i); - size = 0; - addPoint(lat, lng, size, color, this._baseGeometry); - } - } - if(this._morphTargetId === undefined) { - this._morphTargetId = 0; - } else { - this._morphTargetId += 1; - } - opts.name = opts.name || 'morphTarget'+this._morphTargetId; - } - var subgeo = new THREE.Geometry(); - for (i = 0; i < data.length; i += step) { - lat = data[i]; - lng = data[i + 1]; - color = colorFnWrapper(data,i); - size = data[i + 2]; - size = size*200; - addPoint(lat, lng, size, color, subgeo); - } - if (opts.animated) { - this._baseGeometry.morphTargets.push({'name': opts.name, vertices: subgeo.vertices}); - } else { - this._baseGeometry = subgeo; - } - - }; - - function 
createPoints() { - if (this._baseGeometry !== undefined) { - if (this.is_animated === false) { - this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ - color: 0xffffff, - vertexColors: THREE.FaceColors, - morphTargets: false - })); - } else { - if (this._baseGeometry.morphTargets.length < 8) { - console.log('t l',this._baseGeometry.morphTargets.length); - var padding = 8-this._baseGeometry.morphTargets.length; - console.log('padding', padding); - for(var i=0; i<=padding; i++) { - console.log('padding',i); - this._baseGeometry.morphTargets.push({'name': 'morphPadding'+i, vertices: this._baseGeometry.vertices}); - } - } - this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ - color: 0xffffff, - vertexColors: THREE.FaceColors, - morphTargets: true - })); - } - scene.add(this.points); - } - } - - function addPoint(lat, lng, size, color, subgeo) { - - var phi = (90 - lat) * Math.PI / 180; - var theta = (180 - lng) * Math.PI / 180; - - point.position.x = 200 * Math.sin(phi) * Math.cos(theta); - point.position.y = 200 * Math.cos(phi); - point.position.z = 200 * Math.sin(phi) * Math.sin(theta); - - point.lookAt(mesh.position); - - point.scale.z = Math.max( size, 0.1 ); // avoid non-invertible matrix - point.updateMatrix(); - - for (var i = 0; i < point.geometry.faces.length; i++) { - - point.geometry.faces[i].color = color; - - } - if(point.matrixAutoUpdate){ - point.updateMatrix(); - } - subgeo.merge(point.geometry, point.matrix); - } - - function onMouseDown(event) { - event.preventDefault(); - - container.addEventListener('mousemove', onMouseMove, false); - container.addEventListener('mouseup', onMouseUp, false); - container.addEventListener('mouseout', onMouseOut, false); - - mouseOnDown.x = - event.clientX; - mouseOnDown.y = event.clientY; - - targetOnDown.x = target.x; - targetOnDown.y = target.y; - - container.style.cursor = 'move'; - } - - function onMouseMove(event) { - mouse.x = - event.clientX; - mouse.y = event.clientY; - - var zoomDamp = distance/1000; - - target.x = targetOnDown.x + (mouse.x - mouseOnDown.x) * 0.005 * zoomDamp; - target.y = targetOnDown.y + (mouse.y - mouseOnDown.y) * 0.005 * zoomDamp; - - target.y = target.y > PI_HALF ? PI_HALF : target.y; - target.y = target.y < - PI_HALF ? - PI_HALF : target.y; - } - - function onMouseUp(event) { - container.removeEventListener('mousemove', onMouseMove, false); - container.removeEventListener('mouseup', onMouseUp, false); - container.removeEventListener('mouseout', onMouseOut, false); - container.style.cursor = 'auto'; - } - - function onMouseOut(event) { - container.removeEventListener('mousemove', onMouseMove, false); - container.removeEventListener('mouseup', onMouseUp, false); - container.removeEventListener('mouseout', onMouseOut, false); - } - - function onMouseWheel(event) { - if (container.style.cursor != "move") return false; - event.preventDefault(); - if (overRenderer) { - if (event.deltaY) { - zoom(-event.deltaY * (event.deltaMode == 0 ? 
1 : 50)); - } else { - zoom(event.wheelDeltaY * 0.3); - } - } - return false; - } - - function onDocumentKeyDown(event) { - switch (event.keyCode) { - case 38: - zoom(100); - event.preventDefault(); - break; - case 40: - zoom(-100); - event.preventDefault(); - break; - } - } - - function onWindowResize( event ) { - camera.aspect = container.offsetWidth / container.offsetHeight; - camera.updateProjectionMatrix(); - renderer.setSize( container.offsetWidth, container.offsetHeight ); - } - - function zoom(delta) { - distanceTarget -= delta; - distanceTarget = distanceTarget > 855 ? 855 : distanceTarget; - distanceTarget = distanceTarget < 350 ? 350 : distanceTarget; - } - - function animate() { - if (!running) return - requestAnimationFrame(animate); - render(); - } - - function render() { - zoom(curZoomSpeed); - - rotation.x += (target.x - rotation.x) * 0.1; - rotation.y += (target.y - rotation.y) * 0.1; - distance += (distanceTarget - distance) * 0.3; - - camera.position.x = distance * Math.sin(rotation.x) * Math.cos(rotation.y); - camera.position.y = distance * Math.sin(rotation.y); - camera.position.z = distance * Math.cos(rotation.x) * Math.cos(rotation.y); - - camera.lookAt(mesh.position); - - renderer.render(scene, camera); - } - - function unload() { - running = false - container.removeEventListener('mousedown', onMouseDown, false); - if ('onwheel' in document) { - container.removeEventListener('wheel', onMouseWheel, false); - } else { - container.removeEventListener('mousewheel', onMouseWheel, false); - } - document.removeEventListener('keydown', onDocumentKeyDown, false); - window.removeEventListener('resize', onWindowResize, false); - - } - - init(); - this.animate = animate; - this.unload = unload; - - - this.__defineGetter__('time', function() { - return this._time || 0; - }); - - this.__defineSetter__('time', function(t) { - var validMorphs = []; - var morphDict = this.points.morphTargetDictionary; - for(var k in morphDict) { - if(k.indexOf('morphPadding') < 0) { - validMorphs.push(morphDict[k]); - } - } - validMorphs.sort(); - var l = validMorphs.length-1; - var scaledt = t*l+1; - var index = Math.floor(scaledt); - for (i=0;i= 0) { - this.points.morphTargetInfluences[lastIndex] = 1 - leftover; - } - this.points.morphTargetInfluences[index] = leftover; - this._time = t; - }); - - this.addData = addData; - this.createPoints = createPoints; - this.renderer = renderer; - this.scene = scene; - - return this; - -}; - - - -/* ---- plugins/Sidebar/media-globe/three.min.js ---- */ - - -// threejs.org/license -'use strict';var THREE={REVISION:"69"};"object"===typeof module&&(module.exports=THREE);void 0===Math.sign&&(Math.sign=function(a){return 0>a?-1:0>16&255)/255;this.g=(a>>8&255)/255;this.b=(a&255)/255;return this},setRGB:function(a,b,c){this.r=a;this.g=b;this.b=c;return this},setHSL:function(a,b,c){if(0===b)this.r=this.g=this.b=c;else{var d=function(a,b,c){0>c&&(c+=1);1c?b:c<2/3?a+6*(b-a)*(2/3-c):a};b=.5>=c?c*(1+b):c+b-c*b;c=2*c-b;this.r=d(c,b,a+1/3);this.g=d(c,b,a);this.b=d(c,b,a-1/3)}return this},setStyle:function(a){if(/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.test(a))return a=/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.exec(a),this.r=Math.min(255,parseInt(a[1],10))/255,this.g=Math.min(255,parseInt(a[2],10))/255,this.b=Math.min(255,parseInt(a[3],10))/255,this;if(/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.test(a))return a=/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.exec(a),this.r= 
-Math.min(100,parseInt(a[1],10))/100,this.g=Math.min(100,parseInt(a[2],10))/100,this.b=Math.min(100,parseInt(a[3],10))/100,this;if(/^\#([0-9a-f]{6})$/i.test(a))return a=/^\#([0-9a-f]{6})$/i.exec(a),this.setHex(parseInt(a[1],16)),this;if(/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.test(a))return a=/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(a),this.setHex(parseInt(a[1]+a[1]+a[2]+a[2]+a[3]+a[3],16)),this;if(/^(\w+)$/i.test(a))return this.setHex(THREE.ColorKeywords[a]),this},copy:function(a){this.r=a.r;this.g= -a.g;this.b=a.b;return this},copyGammaToLinear:function(a){this.r=a.r*a.r;this.g=a.g*a.g;this.b=a.b*a.b;return this},copyLinearToGamma:function(a){this.r=Math.sqrt(a.r);this.g=Math.sqrt(a.g);this.b=Math.sqrt(a.b);return this},convertGammaToLinear:function(){var a=this.r,b=this.g,c=this.b;this.r=a*a;this.g=b*b;this.b=c*c;return this},convertLinearToGamma:function(){this.r=Math.sqrt(this.r);this.g=Math.sqrt(this.g);this.b=Math.sqrt(this.b);return this},getHex:function(){return 255*this.r<<16^255*this.g<< -8^255*this.b<<0},getHexString:function(){return("000000"+this.getHex().toString(16)).slice(-6)},getHSL:function(a){a=a||{h:0,s:0,l:0};var b=this.r,c=this.g,d=this.b,e=Math.max(b,c,d),f=Math.min(b,c,d),g,h=(f+e)/2;if(f===e)f=g=0;else{var k=e-f,f=.5>=h?k/(e+f):k/(2-e-f);switch(e){case b:g=(c-d)/k+(cf&&c>b?(c=2*Math.sqrt(1+c-f-b),this._w=(k-g)/c,this._x=.25*c,this._y=(a+e)/c,this._z=(d+h)/c):f>b?(c=2*Math.sqrt(1+f-c-b),this._w=(d-h)/c,this._x=(a+e)/c,this._y= -.25*c,this._z=(g+k)/c):(c=2*Math.sqrt(1+b-c-f),this._w=(e-a)/c,this._x=(d+h)/c,this._y=(g+k)/c,this._z=.25*c);this.onChangeCallback();return this},setFromUnitVectors:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3);b=c.dot(d)+1;1E-6>b?(b=0,Math.abs(c.x)>Math.abs(c.z)?a.set(-c.y,c.x,0):a.set(0,-c.z,c.y)):a.crossVectors(c,d);this._x=a.x;this._y=a.y;this._z=a.z;this._w=b;this.normalize();return this}}(),inverse:function(){this.conjugate().normalize();return this},conjugate:function(){this._x*= --1;this._y*=-1;this._z*=-1;this.onChangeCallback();return this},dot:function(a){return this._x*a._x+this._y*a._y+this._z*a._z+this._w*a._w},lengthSq:function(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w},length:function(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)},normalize:function(){var a=this.length();0===a?(this._z=this._y=this._x=0,this._w=1):(a=1/a,this._x*=a,this._y*=a,this._z*=a,this._w*=a);this.onChangeCallback();return this}, -multiply:function(a,b){return void 0!==b?(console.warn("THREE.Quaternion: .multiply() now only accepts one argument. Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(a,b)):this.multiplyQuaternions(this,a)},multiplyQuaternions:function(a,b){var c=a._x,d=a._y,e=a._z,f=a._w,g=b._x,h=b._y,k=b._z,n=b._w;this._x=c*n+f*g+d*k-e*h;this._y=d*n+f*h+e*g-c*k;this._z=e*n+f*k+c*h-d*g;this._w=f*n-c*g-d*h-e*k;this.onChangeCallback();return this},multiplyVector3:function(a){console.warn("THREE.Quaternion: .multiplyVector3() has been removed. 
Use is now vector.applyQuaternion( quaternion ) instead."); -return a.applyQuaternion(this)},slerp:function(a,b){if(0===b)return this;if(1===b)return this.copy(a);var c=this._x,d=this._y,e=this._z,f=this._w,g=f*a._w+c*a._x+d*a._y+e*a._z;0>g?(this._w=-a._w,this._x=-a._x,this._y=-a._y,this._z=-a._z,g=-g):this.copy(a);if(1<=g)return this._w=f,this._x=c,this._y=d,this._z=e,this;var h=Math.acos(g),k=Math.sqrt(1-g*g);if(.001>Math.abs(k))return this._w=.5*(f+this._w),this._x=.5*(c+this._x),this._y=.5*(d+this._y),this._z=.5*(e+this._z),this;g=Math.sin((1-b)*h)/k;h= -Math.sin(b*h)/k;this._w=f*g+this._w*h;this._x=c*g+this._x*h;this._y=d*g+this._y*h;this._z=e*g+this._z*h;this.onChangeCallback();return this},equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._w===this._w},fromArray:function(a,b){void 0===b&&(b=0);this._x=a[b];this._y=a[b+1];this._z=a[b+2];this._w=a[b+3];this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._w;return a},onChange:function(a){this.onChangeCallback= -a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Quaternion(this._x,this._y,this._z,this._w)}};THREE.Quaternion.slerp=function(a,b,c,d){return c.copy(a).slerp(b,d)};THREE.Vector2=function(a,b){this.x=a||0;this.y=b||0}; -THREE.Vector2.prototype={constructor:THREE.Vector2,set:function(a,b){this.x=a;this.y=b;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;return this},add:function(a, -b){if(void 0!==b)return console.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;return this},addScalar:function(a){this.x+=a;this.y+=a;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector2: .sub() now only accepts one argument. 
Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;return this}, -subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;return this},multiply:function(a){this.x*=a.x;this.y*=a.y;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;return this},divide:function(a){this.x/=a.x;this.y/=a.y;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a):this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector2,b=new THREE.Vector2);a.set(c,c);b.set(d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);return this}, -roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);return this},negate:function(){this.x=-this.x;this.y=-this.y;return this},dot:function(a){return this.x*a.x+this.y*a.y},lengthSq:function(){return this.x*this.x+this.y*this.y},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y)},normalize:function(){return this.divideScalar(this.length())},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b= -this.x-a.x;a=this.y-a.y;return b*b+a*a},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;return a},clone:function(){return new THREE.Vector2(this.x,this.y)}}; -THREE.Vector3=function(a,b,c){this.x=a||0;this.y=b||0;this.z=c||0}; -THREE.Vector3.prototype={constructor:THREE.Vector3,set:function(a,b,c){this.x=a;this.y=b;this.z=c;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:throw Error("index is out of range: "+ -a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."), -this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;return this},multiply:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .multiply() now only accepts one argument. 
Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(a,b);this.x*=a.x;this.y*=a.y;this.z*=a.z;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;return this},multiplyVectors:function(a,b){this.x=a.x*b.x;this.y= -a.y*b.y;this.z=a.z*b.z;return this},applyEuler:function(){var a;return function(b){!1===b instanceof THREE.Euler&&console.error("THREE.Vector3: .applyEuler() now expects a Euler rotation rather than a Vector3 and order.");void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromEuler(b));return this}}(),applyAxisAngle:function(){var a;return function(b,c){void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromAxisAngle(b,c));return this}}(),applyMatrix3:function(a){var b=this.x, -c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[3]*c+a[6]*d;this.y=a[1]*b+a[4]*c+a[7]*d;this.z=a[2]*b+a[5]*c+a[8]*d;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12];this.y=a[1]*b+a[5]*c+a[9]*d+a[13];this.z=a[2]*b+a[6]*c+a[10]*d+a[14];return this},applyProjection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;var e=1/(a[3]*b+a[7]*c+a[11]*d+a[15]);this.x=(a[0]*b+a[4]*c+a[8]*d+a[12])*e;this.y=(a[1]*b+a[5]*c+a[9]*d+a[13])*e;this.z= -(a[2]*b+a[6]*c+a[10]*d+a[14])*e;return this},applyQuaternion:function(a){var b=this.x,c=this.y,d=this.z,e=a.x,f=a.y,g=a.z;a=a.w;var h=a*b+f*d-g*c,k=a*c+g*b-e*d,n=a*d+e*c-f*b,b=-e*b-f*c-g*d;this.x=h*a+b*-e+k*-g-n*-f;this.y=k*a+b*-f+n*-e-h*-g;this.z=n*a+b*-g+h*-f-k*-e;return this},project:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.projectionMatrix,a.getInverse(b.matrixWorld));return this.applyProjection(a)}}(),unproject:function(){var a;return function(b){void 0=== -a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.matrixWorld,a.getInverse(b.projectionMatrix));return this.applyProjection(a)}}(),transformDirection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d;this.y=a[1]*b+a[5]*c+a[9]*d;this.z=a[2]*b+a[6]*c+a[10]*d;this.normalize();return this},divide:function(a){this.x/=a.x;this.y/=a.y;this.z/=a.z;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a):this.z=this.y=this.x=0;return this},min:function(a){this.x> -a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3,b=new THREE.Vector3);a.set(c,c,c);b.set(d,d,d);return this.clamp(a, -b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z); -return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)},normalize:function(){return 
this.divideScalar(this.length())},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/ -b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;return this},cross:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead."),this.crossVectors(a,b);var c=this.x,d=this.y,e=this.z;this.x=d*a.z-e*a.y;this.y=e*a.x-c*a.z;this.z=c*a.y-d*a.x;return this},crossVectors:function(a,b){var c=a.x,d=a.y,e=a.z,f=b.x,g=b.y,h=b.z;this.x=d*h-e*g;this.y=e*f-c*h;this.z=c*g-d*f;return this}, -projectOnVector:function(){var a,b;return function(c){void 0===a&&(a=new THREE.Vector3);a.copy(c).normalize();b=this.dot(a);return this.copy(a).multiplyScalar(b)}}(),projectOnPlane:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);a.copy(this).projectOnVector(b);return this.sub(a)}}(),reflect:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);return this.sub(a.copy(b).multiplyScalar(2*this.dot(b)))}}(),angleTo:function(a){a=this.dot(a)/(this.length()*a.length()); -return Math.acos(THREE.Math.clamp(a,-1,1))},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b=this.x-a.x,c=this.y-a.y;a=this.z-a.z;return b*b+c*c+a*a},setEulerFromRotationMatrix:function(a,b){console.error("THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotationMatrix() instead.")},setEulerFromQuaternion:function(a,b){console.error("THREE.Vector3: .setEulerFromQuaternion() has been removed. Use Euler.setFromQuaternion() instead.")}, -getPositionFromMatrix:function(a){console.warn("THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().");return this.setFromMatrixPosition(a)},getScaleFromMatrix:function(a){console.warn("THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().");return this.setFromMatrixScale(a)},getColumnFromMatrix:function(a,b){console.warn("THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().");return this.setFromMatrixColumn(a, -b)},setFromMatrixPosition:function(a){this.x=a.elements[12];this.y=a.elements[13];this.z=a.elements[14];return this},setFromMatrixScale:function(a){var b=this.set(a.elements[0],a.elements[1],a.elements[2]).length(),c=this.set(a.elements[4],a.elements[5],a.elements[6]).length();a=this.set(a.elements[8],a.elements[9],a.elements[10]).length();this.x=b;this.y=c;this.z=a;return this},setFromMatrixColumn:function(a,b){var c=4*a,d=b.elements;this.x=d[c];this.y=d[c+1];this.z=d[c+2];return this},equals:function(a){return a.x=== -this.x&&a.y===this.y&&a.z===this.z},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;return a},clone:function(){return new THREE.Vector3(this.x,this.y,this.z)}};THREE.Vector4=function(a,b,c,d){this.x=a||0;this.y=b||0;this.z=c||0;this.w=void 0!==d?d:1}; -THREE.Vector4.prototype={constructor:THREE.Vector4,set:function(a,b,c,d){this.x=a;this.y=b;this.z=c;this.w=d;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setW:function(a){this.w=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;case 3:this.w=b;break;default:throw Error("index is out of range: 
"+a);}},getComponent:function(a){switch(a){case 0:return this.x; -case 1:return this.y;case 2:return this.z;case 3:return this.w;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;this.w=void 0!==a.w?a.w:1;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;this.w+=a.w;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;this.w+=a;return this}, -addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;this.w=a.w+b.w;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;this.w-=a.w;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;this.w=a.w-b.w;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;this.w*=a;return this},applyMatrix4:function(a){var b= -this.x,c=this.y,d=this.z,e=this.w;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12]*e;this.y=a[1]*b+a[5]*c+a[9]*d+a[13]*e;this.z=a[2]*b+a[6]*c+a[10]*d+a[14]*e;this.w=a[3]*b+a[7]*c+a[11]*d+a[15]*e;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a,this.w*=a):(this.z=this.y=this.x=0,this.w=1);return this},setAxisAngleFromQuaternion:function(a){this.w=2*Math.acos(a.w);var b=Math.sqrt(1-a.w*a.w);1E-4>b?(this.x=1,this.z=this.y=0):(this.x=a.x/b,this.y=a.y/b,this.z=a.z/b);return this}, -setAxisAngleFromRotationMatrix:function(a){var b,c,d;a=a.elements;var e=a[0];d=a[4];var f=a[8],g=a[1],h=a[5],k=a[9];c=a[2];b=a[6];var n=a[10];if(.01>Math.abs(d-g)&&.01>Math.abs(f-c)&&.01>Math.abs(k-b)){if(.1>Math.abs(d+g)&&.1>Math.abs(f+c)&&.1>Math.abs(k+b)&&.1>Math.abs(e+h+n-3))return this.set(1,0,0,0),this;a=Math.PI;e=(e+1)/2;h=(h+1)/2;n=(n+1)/2;d=(d+g)/4;f=(f+c)/4;k=(k+b)/4;e>h&&e>n?.01>e?(b=0,d=c=.707106781):(b=Math.sqrt(e),c=d/b,d=f/b):h>n?.01>h?(b=.707106781,c=0,d=.707106781):(c=Math.sqrt(h), -b=d/c,d=k/c):.01>n?(c=b=.707106781,d=0):(d=Math.sqrt(n),b=f/d,c=k/d);this.set(b,c,d,a);return this}a=Math.sqrt((b-k)*(b-k)+(f-c)*(f-c)+(g-d)*(g-d));.001>Math.abs(a)&&(a=1);this.x=(b-k)/a;this.y=(f-c)/a;this.z=(g-d)/a;this.w=Math.acos((e+h+n-1)/2);return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);this.w>a.w&&(this.w=a.w);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);this.wb.w&&(this.w=b.w);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector4,b=new THREE.Vector4);a.set(c,c,c,c);b.set(d,d,d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);this.w=Math.floor(this.w); -return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);this.w=Math.ceil(this.w);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);this.w=Math.round(this.w);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);this.w=0>this.w?Math.ceil(this.w):Math.floor(this.w); -return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;this.w=-this.w;return 
this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z+this.w*a.w},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)+Math.abs(this.w)},normalize:function(){return this.divideScalar(this.length())}, -setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;this.w+=(a.w-this.w)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z&&a.w===this.w},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];this.w=a[b+3];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]= -this.z;a[b+3]=this.w;return a},clone:function(){return new THREE.Vector4(this.x,this.y,this.z,this.w)}};THREE.Euler=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._order=d||THREE.Euler.DefaultOrder};THREE.Euler.RotationOrders="XYZ YZX ZXY XZY YXZ ZYX".split(" ");THREE.Euler.DefaultOrder="XYZ"; -THREE.Euler.prototype={constructor:THREE.Euler,_x:0,_y:0,_z:0,_order:THREE.Euler.DefaultOrder,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get order(){return this._order},set order(a){this._order=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._order=d||this._order;this.onChangeCallback();return this},copy:function(a){this._x= -a._x;this._y=a._y;this._z=a._z;this._order=a._order;this.onChangeCallback();return this},setFromRotationMatrix:function(a,b){var c=THREE.Math.clamp,d=a.elements,e=d[0],f=d[4],g=d[8],h=d[1],k=d[5],n=d[9],p=d[2],q=d[6],d=d[10];b=b||this._order;"XYZ"===b?(this._y=Math.asin(c(g,-1,1)),.99999>Math.abs(g)?(this._x=Math.atan2(-n,d),this._z=Math.atan2(-f,e)):(this._x=Math.atan2(q,k),this._z=0)):"YXZ"===b?(this._x=Math.asin(-c(n,-1,1)),.99999>Math.abs(n)?(this._y=Math.atan2(g,d),this._z=Math.atan2(h,k)):(this._y= -Math.atan2(-p,e),this._z=0)):"ZXY"===b?(this._x=Math.asin(c(q,-1,1)),.99999>Math.abs(q)?(this._y=Math.atan2(-p,d),this._z=Math.atan2(-f,k)):(this._y=0,this._z=Math.atan2(h,e))):"ZYX"===b?(this._y=Math.asin(-c(p,-1,1)),.99999>Math.abs(p)?(this._x=Math.atan2(q,d),this._z=Math.atan2(h,e)):(this._x=0,this._z=Math.atan2(-f,k))):"YZX"===b?(this._z=Math.asin(c(h,-1,1)),.99999>Math.abs(h)?(this._x=Math.atan2(-n,k),this._y=Math.atan2(-p,e)):(this._x=0,this._y=Math.atan2(g,d))):"XZY"===b?(this._z=Math.asin(-c(f, --1,1)),.99999>Math.abs(f)?(this._x=Math.atan2(q,k),this._y=Math.atan2(g,e)):(this._x=Math.atan2(-n,d),this._y=0)):console.warn("THREE.Euler: .setFromRotationMatrix() given unsupported order: "+b);this._order=b;this.onChangeCallback();return this},setFromQuaternion:function(a,b,c){var d=THREE.Math.clamp,e=a.x*a.x,f=a.y*a.y,g=a.z*a.z,h=a.w*a.w;b=b||this._order;"XYZ"===b?(this._x=Math.atan2(2*(a.x*a.w-a.y*a.z),h-e-f+g),this._y=Math.asin(d(2*(a.x*a.z+a.y*a.w),-1,1)),this._z=Math.atan2(2*(a.z*a.w-a.x* 
-a.y),h+e-f-g)):"YXZ"===b?(this._x=Math.asin(d(2*(a.x*a.w-a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h-e-f+g),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h-e+f-g)):"ZXY"===b?(this._x=Math.asin(d(2*(a.x*a.w+a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.y*a.w-a.z*a.x),h-e-f+g),this._z=Math.atan2(2*(a.z*a.w-a.x*a.y),h-e+f-g)):"ZYX"===b?(this._x=Math.atan2(2*(a.x*a.w+a.z*a.y),h-e-f+g),this._y=Math.asin(d(2*(a.y*a.w-a.x*a.z),-1,1)),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h+e-f-g)):"YZX"===b?(this._x=Math.atan2(2* -(a.x*a.w-a.z*a.y),h-e+f-g),this._y=Math.atan2(2*(a.y*a.w-a.x*a.z),h+e-f-g),this._z=Math.asin(d(2*(a.x*a.y+a.z*a.w),-1,1))):"XZY"===b?(this._x=Math.atan2(2*(a.x*a.w+a.y*a.z),h-e+f-g),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h+e-f-g),this._z=Math.asin(d(2*(a.z*a.w-a.x*a.y),-1,1))):console.warn("THREE.Euler: .setFromQuaternion() given unsupported order: "+b);this._order=b;if(!1!==c)this.onChangeCallback();return this},reorder:function(){var a=new THREE.Quaternion;return function(b){a.setFromEuler(this); -this.setFromQuaternion(a,b)}}(),equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._order===this._order},fromArray:function(a){this._x=a[0];this._y=a[1];this._z=a[2];void 0!==a[3]&&(this._order=a[3]);this.onChangeCallback();return this},toArray:function(){return[this._x,this._y,this._z,this._order]},onChange:function(a){this.onChangeCallback=a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Euler(this._x,this._y,this._z,this._order)}}; -THREE.Line3=function(a,b){this.start=void 0!==a?a:new THREE.Vector3;this.end=void 0!==b?b:new THREE.Vector3}; -THREE.Line3.prototype={constructor:THREE.Line3,set:function(a,b){this.start.copy(a);this.end.copy(b);return this},copy:function(a){this.start.copy(a.start);this.end.copy(a.end);return this},center:function(a){return(a||new THREE.Vector3).addVectors(this.start,this.end).multiplyScalar(.5)},delta:function(a){return(a||new THREE.Vector3).subVectors(this.end,this.start)},distanceSq:function(){return this.start.distanceToSquared(this.end)},distance:function(){return this.start.distanceTo(this.end)},at:function(a, -b){var c=b||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},closestPointToPointParameter:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d){a.subVectors(c,this.start);b.subVectors(this.end,this.start);var e=b.dot(b),e=b.dot(a)/e;d&&(e=THREE.Math.clamp(e,0,1));return e}}(),closestPointToPoint:function(a,b,c){a=this.closestPointToPointParameter(a,b);c=c||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},applyMatrix4:function(a){this.start.applyMatrix4(a); -this.end.applyMatrix4(a);return this},equals:function(a){return a.start.equals(this.start)&&a.end.equals(this.end)},clone:function(){return(new THREE.Line3).copy(this)}};THREE.Box2=function(a,b){this.min=void 0!==a?a:new THREE.Vector2(Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector2(-Infinity,-Infinity)}; -THREE.Box2.prototype={constructor:THREE.Box2,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector2).set((a.x-this.min.x)/(this.max.x-this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.y 
-this.max.y?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector2).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector2;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},translate:function(a){this.min.add(a);this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&& -a.max.equals(this.max)},clone:function(){return(new THREE.Box2).copy(this)}};THREE.Box3=function(a,b){this.min=void 0!==a?a:new THREE.Vector3(Infinity,Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector3(-Infinity,-Infinity,-Infinity)}; -THREE.Box3.prototype={constructor:THREE.Box3,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y||a.zthis.max.z?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y&&this.min.z<=a.min.z&&a.max.z<=this.max.z?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector3).set((a.x-this.min.x)/(this.max.x- -this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y),(a.z-this.min.z)/(this.max.z-this.min.z))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.ythis.max.y||a.max.zthis.max.z?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector3).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),getBoundingSphere:function(){var a= -new THREE.Vector3;return function(b){b=b||new THREE.Sphere;b.center=this.center();b.radius=.5*this.size(a).length();return b}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},applyMatrix4:function(){var a=[new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3];return function(b){a[0].set(this.min.x,this.min.y, -this.min.z).applyMatrix4(b);a[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(b);a[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(b);a[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(b);a[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(b);a[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(b);a[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(b);a[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(b);this.makeEmpty();this.setFromPoints(a);return this}}(),translate:function(a){this.min.add(a); -this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&&a.max.equals(this.max)},clone:function(){return(new THREE.Box3).copy(this)}};THREE.Matrix3=function(){this.elements=new Float32Array([1,0,0,0,1,0,0,0,1]);0this.determinant()&&(g=-g);c.x=f[12];c.y=f[13];c.z=f[14];b.elements.set(this.elements);c=1/g;var f=1/h,n=1/k;b.elements[0]*=c;b.elements[1]*= -c;b.elements[2]*=c;b.elements[4]*=f;b.elements[5]*=f;b.elements[6]*=f;b.elements[8]*=n;b.elements[9]*=n;b.elements[10]*=n;d.setFromRotationMatrix(b);e.x=g;e.y=h;e.z=k;return this}}(),makeFrustum:function(a,b,c,d,e,f){var g=this.elements;g[0]=2*e/(b-a);g[4]=0;g[8]=(b+a)/(b-a);g[12]=0;g[1]=0;g[5]=2*e/(d-c);g[9]=(d+c)/(d-c);g[13]=0;g[2]=0;g[6]=0;g[10]=-(f+e)/(f-e);g[14]=-2*f*e/(f-e);g[3]=0;g[7]=0;g[11]=-1;g[15]=0;return 
this},makePerspective:function(a,b,c,d){a=c*Math.tan(THREE.Math.degToRad(.5*a)); -var e=-a;return this.makeFrustum(e*b,a*b,e,a,c,d)},makeOrthographic:function(a,b,c,d,e,f){var g=this.elements,h=b-a,k=c-d,n=f-e;g[0]=2/h;g[4]=0;g[8]=0;g[12]=-((b+a)/h);g[1]=0;g[5]=2/k;g[9]=0;g[13]=-((c+d)/k);g[2]=0;g[6]=0;g[10]=-2/n;g[14]=-((f+e)/n);g[3]=0;g[7]=0;g[11]=0;g[15]=1;return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements;return[a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15]]},clone:function(){return(new THREE.Matrix4).fromArray(this.elements)}}; -THREE.Ray=function(a,b){this.origin=void 0!==a?a:new THREE.Vector3;this.direction=void 0!==b?b:new THREE.Vector3}; -THREE.Ray.prototype={constructor:THREE.Ray,set:function(a,b){this.origin.copy(a);this.direction.copy(b);return this},copy:function(a){this.origin.copy(a.origin);this.direction.copy(a.direction);return this},at:function(a,b){return(b||new THREE.Vector3).copy(this.direction).multiplyScalar(a).add(this.origin)},recast:function(){var a=new THREE.Vector3;return function(b){this.origin.copy(this.at(b,a));return this}}(),closestPointToPoint:function(a,b){var c=b||new THREE.Vector3;c.subVectors(a,this.origin); -var d=c.dot(this.direction);return 0>d?c.copy(this.origin):c.copy(this.direction).multiplyScalar(d).add(this.origin)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){var c=a.subVectors(b,this.origin).dot(this.direction);if(0>c)return this.origin.distanceTo(b);a.copy(this.direction).multiplyScalar(c).add(this.origin);return a.distanceTo(b)}}(),distanceSqToSegment:function(a,b,c,d){var e=a.clone().add(b).multiplyScalar(.5),f=b.clone().sub(a).normalize(),g=.5*a.distanceTo(b),h= -this.origin.clone().sub(e);a=-this.direction.dot(f);b=h.dot(this.direction);var k=-h.dot(f),n=h.lengthSq(),p=Math.abs(1-a*a),q,m;0<=p?(h=a*k-b,q=a*b-k,m=g*p,0<=h?q>=-m?q<=m?(g=1/p,h*=g,q*=g,a=h*(h+a*q+2*b)+q*(a*h+q+2*k)+n):(q=g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):(q=-g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):q<=-m?(h=Math.max(0,-(-a*g+b)),q=0f)return null;f=Math.sqrt(f-e);e=d-f; -d+=f;return 0>e&&0>d?null:0>e?this.at(d,c):this.at(e,c)}}(),isIntersectionPlane:function(a){var b=a.distanceToPoint(this.origin);return 0===b||0>a.normal.dot(this.direction)*b?!0:!1},distanceToPlane:function(a){var b=a.normal.dot(this.direction);if(0==b)return 0==a.distanceToPoint(this.origin)?0:null;a=-(this.origin.dot(a.normal)+a.constant)/b;return 0<=a?a:null},intersectPlane:function(a,b){var c=this.distanceToPlane(a);return null===c?null:this.at(c,b)},isIntersectionBox:function(){var a=new THREE.Vector3; -return function(b){return null!==this.intersectBox(b,a)}}(),intersectBox:function(a,b){var c,d,e,f,g;d=1/this.direction.x;f=1/this.direction.y;g=1/this.direction.z;var h=this.origin;0<=d?(c=(a.min.x-h.x)*d,d*=a.max.x-h.x):(c=(a.max.x-h.x)*d,d*=a.min.x-h.x);0<=f?(e=(a.min.y-h.y)*f,f*=a.max.y-h.y):(e=(a.max.y-h.y)*f,f*=a.min.y-h.y);if(c>f||e>d)return null;if(e>c||c!==c)c=e;if(fg||e>d)return null;if(e>c||c!== -c)c=e;if(gd?null:this.at(0<=c?c:d,b)},intersectTriangle:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Vector3;return function(e,f,g,h,k){b.subVectors(f,e);c.subVectors(g,e);d.crossVectors(b,c);f=this.direction.dot(d);if(0f)h=-1,f=-f;else return null;a.subVectors(this.origin,e);e=h*this.direction.dot(c.crossVectors(a,c));if(0>e)return null;g=h*this.direction.dot(b.cross(a));if(0>g||e+g>f)return null; -e=-h*a.dot(d);return 
0>e?null:this.at(e/f,k)}}(),applyMatrix4:function(a){this.direction.add(this.origin).applyMatrix4(a);this.origin.applyMatrix4(a);this.direction.sub(this.origin);this.direction.normalize();return this},equals:function(a){return a.origin.equals(this.origin)&&a.direction.equals(this.direction)},clone:function(){return(new THREE.Ray).copy(this)}};THREE.Sphere=function(a,b){this.center=void 0!==a?a:new THREE.Vector3;this.radius=void 0!==b?b:0}; -THREE.Sphere.prototype={constructor:THREE.Sphere,set:function(a,b){this.center.copy(a);this.radius=b;return this},setFromPoints:function(){var a=new THREE.Box3;return function(b,c){var d=this.center;void 0!==c?d.copy(c):a.setFromPoints(b).center(d);for(var e=0,f=0,g=b.length;f=this.radius},containsPoint:function(a){return a.distanceToSquared(this.center)<= -this.radius*this.radius},distanceToPoint:function(a){return a.distanceTo(this.center)-this.radius},intersectsSphere:function(a){var b=this.radius+a.radius;return a.center.distanceToSquared(this.center)<=b*b},clampPoint:function(a,b){var c=this.center.distanceToSquared(a),d=b||new THREE.Vector3;d.copy(a);c>this.radius*this.radius&&(d.sub(this.center).normalize(),d.multiplyScalar(this.radius).add(this.center));return d},getBoundingBox:function(a){a=a||new THREE.Box3;a.set(this.center,this.center);a.expandByScalar(this.radius); -return a},applyMatrix4:function(a){this.center.applyMatrix4(a);this.radius*=a.getMaxScaleOnAxis();return this},translate:function(a){this.center.add(a);return this},equals:function(a){return a.center.equals(this.center)&&a.radius===this.radius},clone:function(){return(new THREE.Sphere).copy(this)}}; -THREE.Frustum=function(a,b,c,d,e,f){this.planes=[void 0!==a?a:new THREE.Plane,void 0!==b?b:new THREE.Plane,void 0!==c?c:new THREE.Plane,void 0!==d?d:new THREE.Plane,void 0!==e?e:new THREE.Plane,void 0!==f?f:new THREE.Plane]}; -THREE.Frustum.prototype={constructor:THREE.Frustum,set:function(a,b,c,d,e,f){var g=this.planes;g[0].copy(a);g[1].copy(b);g[2].copy(c);g[3].copy(d);g[4].copy(e);g[5].copy(f);return this},copy:function(a){for(var b=this.planes,c=0;6>c;c++)b[c].copy(a.planes[c]);return this},setFromMatrix:function(a){var b=this.planes,c=a.elements;a=c[0];var d=c[1],e=c[2],f=c[3],g=c[4],h=c[5],k=c[6],n=c[7],p=c[8],q=c[9],m=c[10],r=c[11],t=c[12],s=c[13],u=c[14],c=c[15];b[0].setComponents(f-a,n-g,r-p,c-t).normalize();b[1].setComponents(f+ -a,n+g,r+p,c+t).normalize();b[2].setComponents(f+d,n+h,r+q,c+s).normalize();b[3].setComponents(f-d,n-h,r-q,c-s).normalize();b[4].setComponents(f-e,n-k,r-m,c-u).normalize();b[5].setComponents(f+e,n+k,r+m,c+u).normalize();return this},intersectsObject:function(){var a=new THREE.Sphere;return function(b){var c=b.geometry;null===c.boundingSphere&&c.computeBoundingSphere();a.copy(c.boundingSphere);a.applyMatrix4(b.matrixWorld);return this.intersectsSphere(a)}}(),intersectsSphere:function(a){var b=this.planes, -c=a.center;a=-a.radius;for(var d=0;6>d;d++)if(b[d].distanceToPoint(c)e;e++){var f=d[e];a.x=0g&&0>f)return!1}return!0}}(), -containsPoint:function(a){for(var b=this.planes,c=0;6>c;c++)if(0>b[c].distanceToPoint(a))return!1;return!0},clone:function(){return(new THREE.Frustum).copy(this)}};THREE.Plane=function(a,b){this.normal=void 0!==a?a:new THREE.Vector3(1,0,0);this.constant=void 0!==b?b:0}; -THREE.Plane.prototype={constructor:THREE.Plane,set:function(a,b){this.normal.copy(a);this.constant=b;return this},setComponents:function(a,b,c,d){this.normal.set(a,b,c);this.constant=d;return 
this},setFromNormalAndCoplanarPoint:function(a,b){this.normal.copy(a);this.constant=-b.dot(this.normal);return this},setFromCoplanarPoints:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d,e){d=a.subVectors(e,d).cross(b.subVectors(c,d)).normalize();this.setFromNormalAndCoplanarPoint(d, -c);return this}}(),copy:function(a){this.normal.copy(a.normal);this.constant=a.constant;return this},normalize:function(){var a=1/this.normal.length();this.normal.multiplyScalar(a);this.constant*=a;return this},negate:function(){this.constant*=-1;this.normal.negate();return this},distanceToPoint:function(a){return this.normal.dot(a)+this.constant},distanceToSphere:function(a){return this.distanceToPoint(a.center)-a.radius},projectPoint:function(a,b){return this.orthoPoint(a,b).sub(a).negate()},orthoPoint:function(a, -b){var c=this.distanceToPoint(a);return(b||new THREE.Vector3).copy(this.normal).multiplyScalar(c)},isIntersectionLine:function(a){var b=this.distanceToPoint(a.start);a=this.distanceToPoint(a.end);return 0>b&&0a&&0f||1e;e++)8==e||13==e||18==e||23==e?b[e]="-":14==e?b[e]="4":(2>=c&&(c=33554432+16777216*Math.random()|0),d=c&15,c>>=4,b[e]=a[19==e?d&3|8:d]);return b.join("")}}(),clamp:function(a,b,c){return ac?c:a},clampBottom:function(a,b){return a=c)return 1;a=(a-b)/(c-b);return a*a*(3-2*a)},smootherstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*a*(a*(6*a-15)+10)},random16:function(){return(65280*Math.random()+255*Math.random())/65535},randInt:function(a,b){return a+Math.floor(Math.random()*(b-a+1))},randFloat:function(a,b){return a+Math.random()*(b-a)},randFloatSpread:function(a){return a*(.5-Math.random())},degToRad:function(){var a=Math.PI/180;return function(b){return b*a}}(),radToDeg:function(){var a= -180/Math.PI;return function(b){return b*a}}(),isPowerOfTwo:function(a){return 0===(a&a-1)&&0!==a}}; -THREE.Spline=function(a){function b(a,b,c,d,e,f,g){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*g+(-3*(b-c)-2*a-d)*f+a*e+b}this.points=a;var c=[],d={x:0,y:0,z:0},e,f,g,h,k,n,p,q,m;this.initFromArray=function(a){this.points=[];for(var b=0;bthis.points.length-2?this.points.length-1:f+1;c[3]=f>this.points.length-3?this.points.length-1:f+ -2;n=this.points[c[0]];p=this.points[c[1]];q=this.points[c[2]];m=this.points[c[3]];h=g*g;k=g*h;d.x=b(n.x,p.x,q.x,m.x,g,h,k);d.y=b(n.y,p.y,q.y,m.y,g,h,k);d.z=b(n.z,p.z,q.z,m.z,g,h,k);return d};this.getControlPointsArray=function(){var a,b,c=this.points.length,d=[];for(a=0;a=b.x+b.y}}(); -THREE.Triangle.prototype={constructor:THREE.Triangle,set:function(a,b,c){this.a.copy(a);this.b.copy(b);this.c.copy(c);return this},setFromPointsAndIndices:function(a,b,c,d){this.a.copy(a[b]);this.b.copy(a[c]);this.c.copy(a[d]);return this},copy:function(a){this.a.copy(a.a);this.b.copy(a.b);this.c.copy(a.c);return this},area:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){a.subVectors(this.c,this.b);b.subVectors(this.a,this.b);return.5*a.cross(b).length()}}(),midpoint:function(a){return(a|| -new THREE.Vector3).addVectors(this.a,this.b).add(this.c).multiplyScalar(1/3)},normal:function(a){return THREE.Triangle.normal(this.a,this.b,this.c,a)},plane:function(a){return(a||new THREE.Plane).setFromCoplanarPoints(this.a,this.b,this.c)},barycoordFromPoint:function(a,b){return THREE.Triangle.barycoordFromPoint(a,this.a,this.b,this.c,b)},containsPoint:function(a){return THREE.Triangle.containsPoint(a,this.a,this.b,this.c)},equals:function(a){return a.a.equals(this.a)&&a.b.equals(this.b)&&a.c.equals(this.c)}, 
-clone:function(){return(new THREE.Triangle).copy(this)}};THREE.Clock=function(a){this.autoStart=void 0!==a?a:!0;this.elapsedTime=this.oldTime=this.startTime=0;this.running=!1}; -THREE.Clock.prototype={constructor:THREE.Clock,start:function(){this.oldTime=this.startTime=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now();this.running=!0},stop:function(){this.getElapsedTime();this.running=!1},getElapsedTime:function(){this.getDelta();return this.elapsedTime},getDelta:function(){var a=0;this.autoStart&&!this.running&&this.start();if(this.running){var b=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now(), -a=.001*(b-this.oldTime);this.oldTime=b;this.elapsedTime+=a}return a}};THREE.EventDispatcher=function(){}; -THREE.EventDispatcher.prototype={constructor:THREE.EventDispatcher,apply:function(a){a.addEventListener=THREE.EventDispatcher.prototype.addEventListener;a.hasEventListener=THREE.EventDispatcher.prototype.hasEventListener;a.removeEventListener=THREE.EventDispatcher.prototype.removeEventListener;a.dispatchEvent=THREE.EventDispatcher.prototype.dispatchEvent},addEventListener:function(a,b){void 0===this._listeners&&(this._listeners={});var c=this._listeners;void 0===c[a]&&(c[a]=[]);-1===c[a].indexOf(b)&& -c[a].push(b)},hasEventListener:function(a,b){if(void 0===this._listeners)return!1;var c=this._listeners;return void 0!==c[a]&&-1!==c[a].indexOf(b)?!0:!1},removeEventListener:function(a,b){if(void 0!==this._listeners){var c=this._listeners[a];if(void 0!==c){var d=c.indexOf(b);-1!==d&&c.splice(d,1)}}},dispatchEvent:function(a){if(void 0!==this._listeners){var b=this._listeners[a.type];if(void 0!==b){a.target=this;for(var c=[],d=b.length,e=0;eza?-1:1;h[4*a]=la.x;h[4*a+1]=la.y;h[4*a+2]=la.z;h[4*a+3]=Ga}if(void 0===this.attributes.index||void 0===this.attributes.position||void 0===this.attributes.normal||void 0===this.attributes.uv)console.warn("Missing required attributes (index, position, normal or uv) in BufferGeometry.computeTangents()");else{var c=this.attributes.index.array,d=this.attributes.position.array, -e=this.attributes.normal.array,f=this.attributes.uv.array,g=d.length/3;void 0===this.attributes.tangent&&this.addAttribute("tangent",new THREE.BufferAttribute(new Float32Array(4*g),4));for(var h=this.attributes.tangent.array,k=[],n=[],p=0;ps;s++)t=a[3*c+s],-1==m[t]?(q[2*s]=t,q[2*s+1]=-1,p++):m[t]k.index+b)for(k={start:f,count:0,index:g},h.push(k),p=0;6>p;p+=2)s=q[p+1],-1p;p+=2)t=q[p],s=q[p+1],-1===s&&(s=g++),m[t]=s,r[s]=t,e[f++]=s-k.index,k.count++}this.reorderBuffers(e,r,g);return this.offsets=h},merge:function(){console.log("BufferGeometry.merge(): TODO")},normalizeNormals:function(){for(var a=this.attributes.normal.array,b,c,d,e=0,f=a.length;ed?-1:1,e.vertexTangents[c]=new THREE.Vector4(w.x,w.y,w.z,d);this.hasTangents=!0},computeLineDistances:function(){for(var a=0,b=this.vertices,c=0,d=b.length;cd;d++)if(e[d]==e[(d+1)%3]){a.push(f);break}for(f=a.length-1;0<=f;f--)for(e=a[f],this.faces.splice(e,1),c=0,g=this.faceVertexUvs.length;ca.opacity)h.transparent=a.transparent;void 0!==a.depthTest&&(h.depthTest=a.depthTest);void 0!==a.depthWrite&&(h.depthWrite=a.depthWrite);void 0!==a.visible&&(h.visible=a.visible);void 0!==a.flipSided&&(h.side=THREE.BackSide);void 0!==a.doubleSided&&(h.side=THREE.DoubleSide);void 0!==a.wireframe&&(h.wireframe=a.wireframe);void 0!==a.vertexColors&&("face"=== 
-a.vertexColors?h.vertexColors=THREE.FaceColors:a.vertexColors&&(h.vertexColors=THREE.VertexColors));a.colorDiffuse?h.color=e(a.colorDiffuse):a.DbgColor&&(h.color=a.DbgColor);a.colorSpecular&&(h.specular=e(a.colorSpecular));a.colorAmbient&&(h.ambient=e(a.colorAmbient));a.colorEmissive&&(h.emissive=e(a.colorEmissive));a.transparency&&(h.opacity=a.transparency);a.specularCoef&&(h.shininess=a.specularCoef);a.mapDiffuse&&b&&d(h,"map",a.mapDiffuse,a.mapDiffuseRepeat,a.mapDiffuseOffset,a.mapDiffuseWrap, -a.mapDiffuseAnisotropy);a.mapLight&&b&&d(h,"lightMap",a.mapLight,a.mapLightRepeat,a.mapLightOffset,a.mapLightWrap,a.mapLightAnisotropy);a.mapBump&&b&&d(h,"bumpMap",a.mapBump,a.mapBumpRepeat,a.mapBumpOffset,a.mapBumpWrap,a.mapBumpAnisotropy);a.mapNormal&&b&&d(h,"normalMap",a.mapNormal,a.mapNormalRepeat,a.mapNormalOffset,a.mapNormalWrap,a.mapNormalAnisotropy);a.mapSpecular&&b&&d(h,"specularMap",a.mapSpecular,a.mapSpecularRepeat,a.mapSpecularOffset,a.mapSpecularWrap,a.mapSpecularAnisotropy);a.mapAlpha&& -b&&d(h,"alphaMap",a.mapAlpha,a.mapAlphaRepeat,a.mapAlphaOffset,a.mapAlphaWrap,a.mapAlphaAnisotropy);a.mapBumpScale&&(h.bumpScale=a.mapBumpScale);a.mapNormal?(g=THREE.ShaderLib.normalmap,k=THREE.UniformsUtils.clone(g.uniforms),k.tNormal.value=h.normalMap,a.mapNormalFactor&&k.uNormalScale.value.set(a.mapNormalFactor,a.mapNormalFactor),h.map&&(k.tDiffuse.value=h.map,k.enableDiffuse.value=!0),h.specularMap&&(k.tSpecular.value=h.specularMap,k.enableSpecular.value=!0),h.lightMap&&(k.tAO.value=h.lightMap, -k.enableAO.value=!0),k.diffuse.value.setHex(h.color),k.specular.value.setHex(h.specular),k.ambient.value.setHex(h.ambient),k.shininess.value=h.shininess,void 0!==h.opacity&&(k.opacity.value=h.opacity),g=new THREE.ShaderMaterial({fragmentShader:g.fragmentShader,vertexShader:g.vertexShader,uniforms:k,lights:!0,fog:!0}),h.transparent&&(g.transparent=!0)):g=new THREE[g](h);void 0!==a.DbgName&&(g.name=a.DbgName);return g}}; -THREE.Loader.Handlers={handlers:[],add:function(a,b){this.handlers.push(a,b)},get:function(a){for(var b=0,c=this.handlers.length;bg;g++)m=y[k++],v=u[2*m],m=u[2*m+1],v=new THREE.Vector2(v,m),2!==g&&c.faceVertexUvs[d][h].push(v),0!==g&&c.faceVertexUvs[d][h+1].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]),s.normal.copy(r.normal));if(t)for(d=0;4>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++], -G[q++],G[q]),2!==d&&r.vertexNormals.push(t),0!==d&&s.vertexNormals.push(t);p&&(p=y[k++],p=w[p],r.color.setHex(p),s.color.setHex(p));if(b)for(d=0;4>d;d++)p=y[k++],p=w[p],2!==d&&r.vertexColors.push(new THREE.Color(p)),0!==d&&s.vertexColors.push(new THREE.Color(p));c.faces.push(r);c.faces.push(s)}else{r=new THREE.Face3;r.a=y[k++];r.b=y[k++];r.c=y[k++];h&&(h=y[k++],r.materialIndex=h);h=c.faces.length;if(d)for(d=0;dg;g++)m=y[k++],v=u[2*m],m=u[2*m+1], -v=new THREE.Vector2(v,m),c.faceVertexUvs[d][h].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]));if(t)for(d=0;3>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++],G[q++],G[q]),r.vertexNormals.push(t);p&&(p=y[k++],r.color.setHex(w[p]));if(b)for(d=0;3>d;d++)p=y[k++],r.vertexColors.push(new THREE.Color(w[p]));c.faces.push(r)}})(d);(function(){var b=void 0!==a.influencesPerVertex?a.influencesPerVertex:2;if(a.skinWeights)for(var d=0,g=a.skinWeights.length;dthis.opacity&&(a.opacity=this.opacity);!1!==this.transparent&&(a.transparent=this.transparent);!1!==this.wireframe&&(a.wireframe=this.wireframe);return a},clone:function(a){void 0===a&&(a=new 
THREE.Material);a.name=this.name;a.side=this.side;a.opacity=this.opacity;a.transparent=this.transparent;a.blending=this.blending;a.blendSrc=this.blendSrc;a.blendDst=this.blendDst;a.blendEquation=this.blendEquation;a.depthTest=this.depthTest;a.depthWrite=this.depthWrite;a.polygonOffset=this.polygonOffset;a.polygonOffsetFactor= -this.polygonOffsetFactor;a.polygonOffsetUnits=this.polygonOffsetUnits;a.alphaTest=this.alphaTest;a.overdraw=this.overdraw;a.visible=this.visible;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Material.prototype);THREE.MaterialIdCount=0; -THREE.LineBasicMaterial=function(a){THREE.Material.call(this);this.type="LineBasicMaterial";this.color=new THREE.Color(16777215);this.linewidth=1;this.linejoin=this.linecap="round";this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.LineBasicMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.LineBasicMaterial.prototype.clone=function(){var a=new THREE.LineBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.linecap=this.linecap;a.linejoin=this.linejoin;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; -THREE.LineDashedMaterial=function(a){THREE.Material.call(this);this.type="LineDashedMaterial";this.color=new THREE.Color(16777215);this.scale=this.linewidth=1;this.dashSize=3;this.gapSize=1;this.vertexColors=!1;this.fog=!0;this.setValues(a)};THREE.LineDashedMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.LineDashedMaterial.prototype.clone=function(){var a=new THREE.LineDashedMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.scale=this.scale;a.dashSize=this.dashSize;a.gapSize=this.gapSize;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; -THREE.MeshBasicMaterial=function(a){THREE.Material.call(this);this.type="MeshBasicMaterial";this.color=new THREE.Color(16777215);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphTargets=this.skinning=!1;this.setValues(a)}; -THREE.MeshBasicMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshBasicMaterial.prototype.clone=function(){var a=new THREE.MeshBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap; -a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;return a}; -THREE.MeshLambertMaterial=function(a){THREE.Material.call(this);this.type="MeshLambertMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.wrapAround=!1;this.wrapRGB=new 
THREE.Vector3(1,1,1);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth= -1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshLambertMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshLambertMaterial.prototype.clone=function(){var a=new THREE.MeshLambertMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog; -a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; -THREE.MeshPhongMaterial=function(a){THREE.Material.call(this);this.type="MeshPhongMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.specular=new THREE.Color(1118481);this.shininess=30;this.wrapAround=this.metal=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.bumpMap=this.lightMap=this.map=null;this.bumpScale=1;this.normalMap=null;this.normalScale=new THREE.Vector2(1,1);this.envMap=this.alphaMap=this.specularMap=null;this.combine= -THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshPhongMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshPhongMaterial.prototype.clone=function(){var a=new THREE.MeshPhongMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.specular.copy(this.specular);a.shininess=this.shininess;a.metal=this.metal;a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.bumpMap=this.bumpMap;a.bumpScale=this.bumpScale;a.normalMap=this.normalMap;a.normalScale.copy(this.normalScale); -a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; -THREE.MeshDepthMaterial=function(a){THREE.Material.call(this);this.type="MeshDepthMaterial";this.wireframe=this.morphTargets=!1;this.wireframeLinewidth=1;this.setValues(a)};THREE.MeshDepthMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshDepthMaterial.prototype.clone=function(){var a=new 
THREE.MeshDepthMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a}; -THREE.MeshNormalMaterial=function(a){THREE.Material.call(this,a);this.type="MeshNormalMaterial";this.shading=THREE.FlatShading;this.wireframe=!1;this.wireframeLinewidth=1;this.morphTargets=!1;this.setValues(a)};THREE.MeshNormalMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshNormalMaterial.prototype.clone=function(){var a=new THREE.MeshNormalMaterial;THREE.Material.prototype.clone.call(this,a);a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshFaceMaterial=function(a){this.uuid=THREE.Math.generateUUID();this.type="MeshFaceMaterial";this.materials=a instanceof Array?a:[]}; -THREE.MeshFaceMaterial.prototype={constructor:THREE.MeshFaceMaterial,toJSON:function(){for(var a={metadata:{version:4.2,type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type,materials:[]},b=0,c=this.materials.length;bf)){var m=b.origin.distanceTo(n);md.far||e.push({distance:m,point:k.clone().applyMatrix4(this.matrixWorld),face:null,faceIndex:null,object:this})}}}();THREE.Line.prototype.clone=function(a){void 0===a&&(a=new THREE.Line(this.geometry,this.material,this.mode));THREE.Object3D.prototype.clone.call(this,a);return a}; -THREE.Mesh=function(a,b){THREE.Object3D.call(this);this.type="Mesh";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.MeshBasicMaterial({color:16777215*Math.random()});this.updateMorphTargets()};THREE.Mesh.prototype=Object.create(THREE.Object3D.prototype); -THREE.Mesh.prototype.updateMorphTargets=function(){if(void 0!==this.geometry.morphTargets&&0g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this})}}}else for(s=p.position.array,t=k=0,w=s.length;k -g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this}))}}else if(k instanceof THREE.Geometry)for(t=this.material instanceof THREE.MeshFaceMaterial,s=!0===t?this.material.materials:null,r=g.precision,u=k.vertices,v=0,y=k.faces.length;vg.far||h.push({distance:x,point:K,face:G,faceIndex:v,object:this}))}}}();THREE.Mesh.prototype.clone=function(a,b){void 0===a&&(a=new THREE.Mesh(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a,b);return a};THREE.Bone=function(a){THREE.Object3D.call(this);this.skin=a};THREE.Bone.prototype=Object.create(THREE.Object3D.prototype); -THREE.Skeleton=function(a,b,c){this.useVertexTexture=void 0!==c?c:!0;this.identityMatrix=new THREE.Matrix4;a=a||[];this.bones=a.slice(0);this.useVertexTexture?(this.boneTextureHeight=this.boneTextureWidth=a=256h.end&&(h.end=e);b||(b=g)}}a.firstAnimation=b}; -THREE.MorphAnimMesh.prototype.setAnimationLabel=function(a,b,c){this.geometry.animations||(this.geometry.animations={});this.geometry.animations[a]={start:b,end:c}};THREE.MorphAnimMesh.prototype.playAnimation=function(a,b){var c=this.geometry.animations[a];c?(this.setFrameRange(c.start,c.end),this.duration=(c.end-c.start)/b*1E3,this.time=0):console.warn("animation["+a+"] undefined")}; -THREE.MorphAnimMesh.prototype.updateAnimation=function(a){var 
b=this.duration/this.length;this.time+=this.direction*a;if(this.mirroredLoop){if(this.time>this.duration||0>this.time)this.direction*=-1,this.time>this.duration&&(this.time=this.duration,this.directionBackwards=!0),0>this.time&&(this.time=0,this.directionBackwards=!1)}else this.time%=this.duration,0>this.time&&(this.time+=this.duration);a=this.startKeyframe+THREE.Math.clamp(Math.floor(this.time/b),0,this.length-1);a!==this.currentKeyframe&& -(this.morphTargetInfluences[this.lastKeyframe]=0,this.morphTargetInfluences[this.currentKeyframe]=1,this.morphTargetInfluences[a]=0,this.lastKeyframe=this.currentKeyframe,this.currentKeyframe=a);b=this.time%b/b;this.directionBackwards&&(b=1-b);this.morphTargetInfluences[this.currentKeyframe]=b;this.morphTargetInfluences[this.lastKeyframe]=1-b}; -THREE.MorphAnimMesh.prototype.interpolateTargets=function(a,b,c){for(var d=this.morphTargetInfluences,e=0,f=d.length;e=this.objects[d].distance)this.objects[d-1].object.visible=!1,this.objects[d].object.visible=!0;else break;for(;dthis.scale.x||c.push({distance:d,point:this.position,face:null,object:this})}}();THREE.Sprite.prototype.clone=function(a){void 0===a&&(a=new THREE.Sprite(this.material));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.Particle=THREE.Sprite; -THREE.LensFlare=function(a,b,c,d,e){THREE.Object3D.call(this);this.lensFlares=[];this.positionScreen=new THREE.Vector3;this.customUpdateCallback=void 0;void 0!==a&&this.add(a,b,c,d,e)};THREE.LensFlare.prototype=Object.create(THREE.Object3D.prototype); -THREE.LensFlare.prototype.add=function(a,b,c,d,e,f){void 0===b&&(b=-1);void 0===c&&(c=0);void 0===f&&(f=1);void 0===e&&(e=new THREE.Color(16777215));void 0===d&&(d=THREE.NormalBlending);c=Math.min(c,Math.max(0,c));this.lensFlares.push({texture:a,size:b,distance:c,x:0,y:0,z:0,scale:1,rotation:1,opacity:f,color:e,blending:d})}; -THREE.LensFlare.prototype.updateLensFlares=function(){var a,b=this.lensFlares.length,c,d=2*-this.positionScreen.x,e=2*-this.positionScreen.y;for(a=0;a dashSize ) {\n\t\tdiscard;\n\t}\n\tgl_FragColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.fog_fragment, -"}"].join("\n")},depth:{uniforms:{mNear:{type:"f",value:1},mFar:{type:"f",value:2E3},opacity:{type:"f",value:1}},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float mNear;\nuniform float mFar;\nuniform float opacity;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment, -"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tfloat depth = gl_FragDepthEXT / gl_FragCoord.w;\n\t#else\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\t#endif\n\tfloat color = 1.0 - smoothstep( mNear, mFar, depth );\n\tgl_FragColor = vec4( vec3( color ), opacity );\n}"].join("\n")},normal:{uniforms:{opacity:{type:"f",value:1}},vertexShader:["varying vec3 vNormal;",THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvNormal = normalize( normalMatrix * normal );", -THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float opacity;\nvarying vec3 vNormal;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = vec4( 0.5 * normalize( vNormal ) + 0.5, opacity 
);",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},normalmap:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{enableAO:{type:"i", -value:0},enableDiffuse:{type:"i",value:0},enableSpecular:{type:"i",value:0},enableReflection:{type:"i",value:0},enableDisplacement:{type:"i",value:0},tDisplacement:{type:"t",value:null},tDiffuse:{type:"t",value:null},tCube:{type:"t",value:null},tNormal:{type:"t",value:null},tSpecular:{type:"t",value:null},tAO:{type:"t",value:null},uNormalScale:{type:"v2",value:new THREE.Vector2(1,1)},uDisplacementBias:{type:"f",value:0},uDisplacementScale:{type:"f",value:1},diffuse:{type:"c",value:new THREE.Color(16777215)}, -specular:{type:"c",value:new THREE.Color(1118481)},ambient:{type:"c",value:new THREE.Color(16777215)},shininess:{type:"f",value:30},opacity:{type:"f",value:1},useRefract:{type:"i",value:0},refractionRatio:{type:"f",value:.98},reflectivity:{type:"f",value:.5},uOffset:{type:"v2",value:new THREE.Vector2(0,0)},uRepeat:{type:"v2",value:new THREE.Vector2(1,1)},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),fragmentShader:["uniform vec3 ambient;\nuniform vec3 diffuse;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\nuniform bool enableDiffuse;\nuniform bool enableSpecular;\nuniform bool enableAO;\nuniform bool enableReflection;\nuniform sampler2D tDiffuse;\nuniform sampler2D tNormal;\nuniform sampler2D tSpecular;\nuniform sampler2D tAO;\nuniform samplerCube tCube;\nuniform vec2 uNormalScale;\nuniform bool useRefract;\nuniform float refractionRatio;\nuniform float reflectivity;\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nuniform vec3 ambientLightColor;\n#if MAX_DIR_LIGHTS > 0\n\tuniform vec3 directionalLightColor[ MAX_DIR_LIGHTS ];\n\tuniform vec3 directionalLightDirection[ MAX_DIR_LIGHTS ];\n#endif\n#if MAX_HEMI_LIGHTS > 0\n\tuniform vec3 hemisphereLightSkyColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightGroundColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightDirection[ MAX_HEMI_LIGHTS ];\n#endif\n#if MAX_POINT_LIGHTS > 0\n\tuniform vec3 pointLightColor[ MAX_POINT_LIGHTS ];\n\tuniform vec3 pointLightPosition[ MAX_POINT_LIGHTS ];\n\tuniform float pointLightDistance[ MAX_POINT_LIGHTS ];\n#endif\n#if MAX_SPOT_LIGHTS > 0\n\tuniform vec3 spotLightColor[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightPosition[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightDirection[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightAngleCos[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightExponent[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightDistance[ MAX_SPOT_LIGHTS ];\n#endif\n#ifdef WRAP_AROUND\n\tuniform vec3 wrapRGB;\n#endif\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", -THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment,"\tgl_FragColor = vec4( vec3( 1.0 ), opacity );\n\tvec3 specularTex = vec3( 1.0 );\n\tvec3 normalTex = texture2D( tNormal, vUv ).xyz * 2.0 - 1.0;\n\tnormalTex.xy *= uNormalScale;\n\tnormalTex = normalize( normalTex );\n\tif( enableDiffuse ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 texelColor = texture2D( tDiffuse, vUv );\n\t\t\ttexelColor.xyz *= texelColor.xyz;\n\t\t\tgl_FragColor = gl_FragColor * texelColor;\n\t\t#else\n\t\t\tgl_FragColor = gl_FragColor * texture2D( tDiffuse, vUv );\n\t\t#endif\n\t}\n\tif( enableAO ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 aoColor = 
texture2D( tAO, vUv );\n\t\t\taoColor.xyz *= aoColor.xyz;\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * aoColor.xyz;\n\t\t#else\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * texture2D( tAO, vUv ).xyz;\n\t\t#endif\n\t}", -THREE.ShaderChunk.alphatest_fragment,"\tif( enableSpecular )\n\t\tspecularTex = texture2D( tSpecular, vUv ).xyz;\n\tmat3 tsb = mat3( normalize( vTangent ), normalize( vBinormal ), normalize( vNormal ) );\n\tvec3 finalNormal = tsb * normalTex;\n\t#ifdef FLIP_SIDED\n\t\tfinalNormal = -finalNormal;\n\t#endif\n\tvec3 normal = normalize( finalNormal );\n\tvec3 viewPosition = normalize( vViewPosition );\n\t#if MAX_POINT_LIGHTS > 0\n\t\tvec3 pointDiffuse = vec3( 0.0 );\n\t\tvec3 pointSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_POINT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( pointLightPosition[ i ], 1.0 );\n\t\t\tvec3 pointVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat pointDistance = 1.0;\n\t\t\tif ( pointLightDistance[ i ] > 0.0 )\n\t\t\t\tpointDistance = 1.0 - min( ( length( pointVector ) / pointLightDistance[ i ] ), 1.0 );\n\t\t\tpointVector = normalize( pointVector );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat pointDiffuseWeightFull = max( dot( normal, pointVector ), 0.0 );\n\t\t\t\tfloat pointDiffuseWeightHalf = max( 0.5 * dot( normal, pointVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 pointDiffuseWeight = mix( vec3( pointDiffuseWeightFull ), vec3( pointDiffuseWeightHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat pointDiffuseWeight = max( dot( normal, pointVector ), 0.0 );\n\t\t\t#endif\n\t\t\tpointDiffuse += pointDistance * pointLightColor[ i ] * diffuse * pointDiffuseWeight;\n\t\t\tvec3 pointHalfVector = normalize( pointVector + viewPosition );\n\t\t\tfloat pointDotNormalHalf = max( dot( normal, pointHalfVector ), 0.0 );\n\t\t\tfloat pointSpecularWeight = specularTex.r * max( pow( pointDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( pointVector, pointHalfVector ), 0.0 ), 5.0 );\n\t\t\tpointSpecular += schlick * pointLightColor[ i ] * pointSpecularWeight * pointDiffuseWeight * pointDistance * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\tvec3 spotDiffuse = vec3( 0.0 );\n\t\tvec3 spotSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_SPOT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( spotLightPosition[ i ], 1.0 );\n\t\t\tvec3 spotVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat spotDistance = 1.0;\n\t\t\tif ( spotLightDistance[ i ] > 0.0 )\n\t\t\t\tspotDistance = 1.0 - min( ( length( spotVector ) / spotLightDistance[ i ] ), 1.0 );\n\t\t\tspotVector = normalize( spotVector );\n\t\t\tfloat spotEffect = dot( spotLightDirection[ i ], normalize( spotLightPosition[ i ] - vWorldPosition ) );\n\t\t\tif ( spotEffect > spotLightAngleCos[ i ] ) {\n\t\t\t\tspotEffect = max( pow( max( spotEffect, 0.0 ), spotLightExponent[ i ] ), 0.0 );\n\t\t\t\t#ifdef WRAP_AROUND\n\t\t\t\t\tfloat spotDiffuseWeightFull = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t\tfloat spotDiffuseWeightHalf = max( 0.5 * dot( normal, spotVector ) + 0.5, 0.0 );\n\t\t\t\t\tvec3 spotDiffuseWeight = mix( vec3( spotDiffuseWeightFull ), vec3( spotDiffuseWeightHalf ), wrapRGB );\n\t\t\t\t#else\n\t\t\t\t\tfloat spotDiffuseWeight = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t#endif\n\t\t\t\tspotDiffuse += spotDistance * spotLightColor[ i ] * diffuse * spotDiffuseWeight * spotEffect;\n\t\t\t\tvec3 spotHalfVector = 
normalize( spotVector + viewPosition );\n\t\t\t\tfloat spotDotNormalHalf = max( dot( normal, spotHalfVector ), 0.0 );\n\t\t\t\tfloat spotSpecularWeight = specularTex.r * max( pow( spotDotNormalHalf, shininess ), 0.0 );\n\t\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( spotVector, spotHalfVector ), 0.0 ), 5.0 );\n\t\t\t\tspotSpecular += schlick * spotLightColor[ i ] * spotSpecularWeight * spotDiffuseWeight * spotDistance * specularNormalization * spotEffect;\n\t\t\t}\n\t\t}\n\t#endif\n\t#if MAX_DIR_LIGHTS > 0\n\t\tvec3 dirDiffuse = vec3( 0.0 );\n\t\tvec3 dirSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_DIR_LIGHTS; i++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( directionalLightDirection[ i ], 0.0 );\n\t\t\tvec3 dirVector = normalize( lDirection.xyz );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat directionalLightWeightingFull = max( dot( normal, dirVector ), 0.0 );\n\t\t\t\tfloat directionalLightWeightingHalf = max( 0.5 * dot( normal, dirVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 dirDiffuseWeight = mix( vec3( directionalLightWeightingFull ), vec3( directionalLightWeightingHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat dirDiffuseWeight = max( dot( normal, dirVector ), 0.0 );\n\t\t\t#endif\n\t\t\tdirDiffuse += directionalLightColor[ i ] * diffuse * dirDiffuseWeight;\n\t\t\tvec3 dirHalfVector = normalize( dirVector + viewPosition );\n\t\t\tfloat dirDotNormalHalf = max( dot( normal, dirHalfVector ), 0.0 );\n\t\t\tfloat dirSpecularWeight = specularTex.r * max( pow( dirDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( dirVector, dirHalfVector ), 0.0 ), 5.0 );\n\t\t\tdirSpecular += schlick * directionalLightColor[ i ] * dirSpecularWeight * dirDiffuseWeight * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\tvec3 hemiDiffuse = vec3( 0.0 );\n\t\tvec3 hemiSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_HEMI_LIGHTS; i ++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( hemisphereLightDirection[ i ], 0.0 );\n\t\t\tvec3 lVector = normalize( lDirection.xyz );\n\t\t\tfloat dotProduct = dot( normal, lVector );\n\t\t\tfloat hemiDiffuseWeight = 0.5 * dotProduct + 0.5;\n\t\t\tvec3 hemiColor = mix( hemisphereLightGroundColor[ i ], hemisphereLightSkyColor[ i ], hemiDiffuseWeight );\n\t\t\themiDiffuse += diffuse * hemiColor;\n\t\t\tvec3 hemiHalfVectorSky = normalize( lVector + viewPosition );\n\t\t\tfloat hemiDotNormalHalfSky = 0.5 * dot( normal, hemiHalfVectorSky ) + 0.5;\n\t\t\tfloat hemiSpecularWeightSky = specularTex.r * max( pow( max( hemiDotNormalHalfSky, 0.0 ), shininess ), 0.0 );\n\t\t\tvec3 lVectorGround = -lVector;\n\t\t\tvec3 hemiHalfVectorGround = normalize( lVectorGround + viewPosition );\n\t\t\tfloat hemiDotNormalHalfGround = 0.5 * dot( normal, hemiHalfVectorGround ) + 0.5;\n\t\t\tfloat hemiSpecularWeightGround = specularTex.r * max( pow( max( hemiDotNormalHalfGround, 0.0 ), shininess ), 0.0 );\n\t\t\tfloat dotProductGround = dot( normal, lVectorGround );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlickSky = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVector, hemiHalfVectorSky ), 0.0 ), 5.0 );\n\t\t\tvec3 schlickGround = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVectorGround, hemiHalfVectorGround ), 0.0 ), 5.0 );\n\t\t\themiSpecular += hemiColor * specularNormalization * ( schlickSky * 
hemiSpecularWeightSky * max( dotProduct, 0.0 ) + schlickGround * hemiSpecularWeightGround * max( dotProductGround, 0.0 ) );\n\t\t}\n\t#endif\n\tvec3 totalDiffuse = vec3( 0.0 );\n\tvec3 totalSpecular = vec3( 0.0 );\n\t#if MAX_DIR_LIGHTS > 0\n\t\ttotalDiffuse += dirDiffuse;\n\t\ttotalSpecular += dirSpecular;\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\ttotalDiffuse += hemiDiffuse;\n\t\ttotalSpecular += hemiSpecular;\n\t#endif\n\t#if MAX_POINT_LIGHTS > 0\n\t\ttotalDiffuse += pointDiffuse;\n\t\ttotalSpecular += pointSpecular;\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\ttotalDiffuse += spotDiffuse;\n\t\ttotalSpecular += spotSpecular;\n\t#endif\n\t#ifdef METAL\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient + totalSpecular );\n\t#else\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient ) + totalSpecular;\n\t#endif\n\tif ( enableReflection ) {\n\t\tvec3 vReflect;\n\t\tvec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );\n\t\tif ( useRefract ) {\n\t\t\tvReflect = refract( cameraToVertex, normal, refractionRatio );\n\t\t} else {\n\t\t\tvReflect = reflect( cameraToVertex, normal );\n\t\t}\n\t\tvec4 cubeColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );\n\t\t#ifdef GAMMA_INPUT\n\t\t\tcubeColor.xyz *= cubeColor.xyz;\n\t\t#endif\n\t\tgl_FragColor.xyz = mix( gl_FragColor.xyz, cubeColor.xyz, specularTex.r * reflectivity );\n\t}", -THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fragment,"}"].join("\n"),vertexShader:["attribute vec4 tangent;\nuniform vec2 uOffset;\nuniform vec2 uRepeat;\nuniform bool enableDisplacement;\n#ifdef VERTEX_TEXTURES\n\tuniform sampler2D tDisplacement;\n\tuniform float uDisplacementScale;\n\tuniform float uDisplacementBias;\n#endif\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", -THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.skinnormal_vertex,"\t#ifdef USE_SKINNING\n\t\tvNormal = normalize( normalMatrix * skinnedNormal.xyz );\n\t\tvec4 skinnedTangent = skinMatrix * vec4( tangent.xyz, 0.0 );\n\t\tvTangent = normalize( normalMatrix * skinnedTangent.xyz );\n\t#else\n\t\tvNormal = normalize( normalMatrix * normal );\n\t\tvTangent = normalize( normalMatrix * tangent.xyz );\n\t#endif\n\tvBinormal = normalize( cross( vNormal, vTangent ) * tangent.w );\n\tvUv = uv * uRepeat + uOffset;\n\tvec3 displacedPosition;\n\t#ifdef VERTEX_TEXTURES\n\t\tif ( enableDisplacement ) {\n\t\t\tvec3 dv = texture2D( tDisplacement, uv ).xyz;\n\t\t\tfloat df = uDisplacementScale * dv.x + uDisplacementBias;\n\t\t\tdisplacedPosition = position + normalize( normal ) * df;\n\t\t} else {\n\t\t\t#ifdef USE_SKINNING\n\t\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\t\tdisplacedPosition = skinned.xyz;\n\t\t\t#else\n\t\t\t\tdisplacedPosition = position;\n\t\t\t#endif\n\t\t}\n\t#else\n\t\t#ifdef USE_SKINNING\n\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\tvec4 skinned = vec4( 
0.0 );\n\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\tdisplacedPosition = skinned.xyz;\n\t\t#else\n\t\t\tdisplacedPosition = position;\n\t\t#endif\n\t#endif\n\tvec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );\n\tvec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );\n\tgl_Position = projectionMatrix * mvPosition;", -THREE.ShaderChunk.logdepthbuf_vertex,"\tvWorldPosition = worldPosition.xyz;\n\tvViewPosition = -mvPosition.xyz;\n\t#ifdef USE_SHADOWMAP\n\t\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\t\t\tvShadowCoord[ i ] = shadowMatrix[ i ] * worldPosition;\n\t\t}\n\t#endif\n}"].join("\n")},cube:{uniforms:{tCube:{type:"t",value:null},tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvec4 worldPosition = modelMatrix * vec4( position, 1.0 );\n\tvWorldPosition = worldPosition.xyz;\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", -THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform samplerCube tCube;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},depthRGBA:{uniforms:{},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex, -"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:[THREE.ShaderChunk.logdepthbuf_pars_fragment,"vec4 pack_depth( const in float depth ) {\n\tconst vec4 bit_shift = vec4( 256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0 );\n\tconst vec4 bit_mask = vec4( 0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0 );\n\tvec4 res = mod( depth * bit_shift * vec4( 255 ), vec4( 256 ) ) / vec4( 255 );\n\tres -= res.xxyz * bit_mask;\n\treturn res;\n}\nvoid main() {", -THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragDepthEXT );\n\t#else\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );\n\t#endif\n}"].join("\n")}}; -THREE.WebGLRenderer=function(a){function b(a){var b=a.geometry;a=a.material;var c=b.vertices.length;if(a.attributes){void 0===b.__webglCustomAttributesList&&(b.__webglCustomAttributesList=[]);for(var d in a.attributes){var e=a.attributes[d];if(!e.__webglInitialized||e.createUniqueBuffers){e.__webglInitialized=!0;var f=1;"v2"===e.type?f=2:"v3"===e.type?f=3:"v4"===e.type?f=4:"c"===e.type&&(f=3);e.size=f;e.array=new Float32Array(c*f);e.buffer=l.createBuffer();e.buffer.belongsToAttribute=d;e.needsUpdate= -!0}b.__webglCustomAttributesList.push(e)}}}function c(a,b){var c=b.geometry,e=a.faces3,f=3*e.length,g=1*e.length,h=3*e.length,e=d(b,a);a.__vertexArray=new Float32Array(3*f);a.__normalArray=new Float32Array(3*f);a.__colorArray=new Float32Array(3*f);a.__uvArray=new Float32Array(2*f);1Aa;Aa++)Cb=ma[Aa],Ta[Sa]=Cb.x,Ta[Sa+1]=Cb.y,Ta[Sa+2]=Cb.z,Sa+=3;else 
for(Aa=0;3>Aa;Aa++)Ta[Sa]=pa.x,Ta[Sa+1]=pa.y,Ta[Sa+2]=pa.z,Sa+=3;l.bindBuffer(l.ARRAY_BUFFER,C.__webglNormalBuffer);l.bufferData(l.ARRAY_BUFFER, -Ta,S)}if(Kc&&ua){M=0;for(ea=N.length;MAa;Aa++)Oa=hb[Aa],sb[qb]=Oa.x,sb[qb+1]=Oa.y,qb+=2;0Aa;Aa++)Qb=za[Aa],fb[rb]=Qb.x,fb[rb+1]=Qb.y,rb+=2;0h&&(f[v].counter+=1,k=f[v].hash+"_"+f[v].counter,k in r||(p={id:rc++, -faces3:[],materialIndex:v,vertices:0,numMorphTargets:m,numMorphNormals:n},r[k]=p,q.push(p)));r[k].faces3.push(t);r[k].vertices+=3}a[g]=q;d.groupsNeedUpdate=!1}a=xb[d.id];g=0;for(e=a.length;gDa;Da++)kb[Da]=!J.autoScaleCubemaps||Ob||Tb?Tb?ua.image[Da].image:ua.image[Da]:R(ua.image[Da],$c);var ka=kb[0],Zb=THREE.Math.isPowerOfTwo(ka.width)&&THREE.Math.isPowerOfTwo(ka.height),ab=Q(ua.format),Fb=Q(ua.type);F(l.TEXTURE_CUBE_MAP,ua,Zb);for(Da=0;6>Da;Da++)if(Ob)for(var gb,$b=kb[Da].mipmaps,ga=0,Xb=$b.length;ga=Oc&&console.warn("WebGLRenderer: trying to use "+a+" texture units while this GPU supports only "+ -Oc);dc+=1;return a}function x(a,b){a._modelViewMatrix.multiplyMatrices(b.matrixWorldInverse,a.matrixWorld);a._normalMatrix.getNormalMatrix(a._modelViewMatrix)}function D(a,b,c,d){a[b]=c.r*c.r*d;a[b+1]=c.g*c.g*d;a[b+2]=c.b*c.b*d}function E(a,b,c,d){a[b]=c.r*d;a[b+1]=c.g*d;a[b+2]=c.b*d}function A(a){a!==Pc&&(l.lineWidth(a),Pc=a)}function B(a,b,c){Qc!==a&&(a?l.enable(l.POLYGON_OFFSET_FILL):l.disable(l.POLYGON_OFFSET_FILL),Qc=a);!a||Rc===b&&Sc===c||(l.polygonOffset(b,c),Rc=b,Sc=c)}function F(a,b,c){c? -(l.texParameteri(a,l.TEXTURE_WRAP_S,Q(b.wrapS)),l.texParameteri(a,l.TEXTURE_WRAP_T,Q(b.wrapT)),l.texParameteri(a,l.TEXTURE_MAG_FILTER,Q(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,Q(b.minFilter))):(l.texParameteri(a,l.TEXTURE_WRAP_S,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_WRAP_T,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_MAG_FILTER,T(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,T(b.minFilter)));(c=pa.get("EXT_texture_filter_anisotropic"))&&b.type!==THREE.FloatType&&(1b||a.height>b){var c=b/Math.max(a.width,a.height),d=document.createElement("canvas");d.width=Math.floor(a.width*c);d.height=Math.floor(a.height*c);d.getContext("2d").drawImage(a,0,0,a.width,a.height,0,0,d.width,d.height);console.log("THREE.WebGLRenderer:",a,"is too big ("+a.width+"x"+a.height+"). 
Resized to "+d.width+"x"+d.height+ -".");return d}return a}function H(a,b){l.bindRenderbuffer(l.RENDERBUFFER,a);b.depthBuffer&&!b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_COMPONENT16,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT,l.RENDERBUFFER,a)):b.depthBuffer&&b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_STENCIL,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a)):l.renderbufferStorage(l.RENDERBUFFER,l.RGBA4,b.width, -b.height)}function C(a){a instanceof THREE.WebGLRenderTargetCube?(l.bindTexture(l.TEXTURE_CUBE_MAP,a.__webglTexture),l.generateMipmap(l.TEXTURE_CUBE_MAP),l.bindTexture(l.TEXTURE_CUBE_MAP,null)):(l.bindTexture(l.TEXTURE_2D,a.__webglTexture),l.generateMipmap(l.TEXTURE_2D),l.bindTexture(l.TEXTURE_2D,null))}function T(a){return a===THREE.NearestFilter||a===THREE.NearestMipMapNearestFilter||a===THREE.NearestMipMapLinearFilter?l.NEAREST:l.LINEAR}function Q(a){var b;if(a===THREE.RepeatWrapping)return l.REPEAT; -if(a===THREE.ClampToEdgeWrapping)return l.CLAMP_TO_EDGE;if(a===THREE.MirroredRepeatWrapping)return l.MIRRORED_REPEAT;if(a===THREE.NearestFilter)return l.NEAREST;if(a===THREE.NearestMipMapNearestFilter)return l.NEAREST_MIPMAP_NEAREST;if(a===THREE.NearestMipMapLinearFilter)return l.NEAREST_MIPMAP_LINEAR;if(a===THREE.LinearFilter)return l.LINEAR;if(a===THREE.LinearMipMapNearestFilter)return l.LINEAR_MIPMAP_NEAREST;if(a===THREE.LinearMipMapLinearFilter)return l.LINEAR_MIPMAP_LINEAR;if(a===THREE.UnsignedByteType)return l.UNSIGNED_BYTE; -if(a===THREE.UnsignedShort4444Type)return l.UNSIGNED_SHORT_4_4_4_4;if(a===THREE.UnsignedShort5551Type)return l.UNSIGNED_SHORT_5_5_5_1;if(a===THREE.UnsignedShort565Type)return l.UNSIGNED_SHORT_5_6_5;if(a===THREE.ByteType)return l.BYTE;if(a===THREE.ShortType)return l.SHORT;if(a===THREE.UnsignedShortType)return l.UNSIGNED_SHORT;if(a===THREE.IntType)return l.INT;if(a===THREE.UnsignedIntType)return l.UNSIGNED_INT;if(a===THREE.FloatType)return l.FLOAT;if(a===THREE.AlphaFormat)return l.ALPHA;if(a===THREE.RGBFormat)return l.RGB; -if(a===THREE.RGBAFormat)return l.RGBA;if(a===THREE.LuminanceFormat)return l.LUMINANCE;if(a===THREE.LuminanceAlphaFormat)return l.LUMINANCE_ALPHA;if(a===THREE.AddEquation)return l.FUNC_ADD;if(a===THREE.SubtractEquation)return l.FUNC_SUBTRACT;if(a===THREE.ReverseSubtractEquation)return l.FUNC_REVERSE_SUBTRACT;if(a===THREE.ZeroFactor)return l.ZERO;if(a===THREE.OneFactor)return l.ONE;if(a===THREE.SrcColorFactor)return l.SRC_COLOR;if(a===THREE.OneMinusSrcColorFactor)return l.ONE_MINUS_SRC_COLOR;if(a=== -THREE.SrcAlphaFactor)return l.SRC_ALPHA;if(a===THREE.OneMinusSrcAlphaFactor)return l.ONE_MINUS_SRC_ALPHA;if(a===THREE.DstAlphaFactor)return l.DST_ALPHA;if(a===THREE.OneMinusDstAlphaFactor)return l.ONE_MINUS_DST_ALPHA;if(a===THREE.DstColorFactor)return l.DST_COLOR;if(a===THREE.OneMinusDstColorFactor)return l.ONE_MINUS_DST_COLOR;if(a===THREE.SrcAlphaSaturateFactor)return l.SRC_ALPHA_SATURATE;b=pa.get("WEBGL_compressed_texture_s3tc");if(null!==b){if(a===THREE.RGB_S3TC_DXT1_Format)return b.COMPRESSED_RGB_S3TC_DXT1_EXT; -if(a===THREE.RGBA_S3TC_DXT1_Format)return b.COMPRESSED_RGBA_S3TC_DXT1_EXT;if(a===THREE.RGBA_S3TC_DXT3_Format)return b.COMPRESSED_RGBA_S3TC_DXT3_EXT;if(a===THREE.RGBA_S3TC_DXT5_Format)return b.COMPRESSED_RGBA_S3TC_DXT5_EXT}b=pa.get("WEBGL_compressed_texture_pvrtc");if(null!==b){if(a===THREE.RGB_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;if(a===THREE.RGB_PVRTC_2BPPV1_Format)return 
b.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;if(a===THREE.RGBA_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG; -if(a===THREE.RGBA_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG}b=pa.get("EXT_blend_minmax");if(null!==b){if(a===THREE.MinEquation)return b.MIN_EXT;if(a===THREE.MaxEquation)return b.MAX_EXT}return 0}console.log("THREE.WebGLRenderer",THREE.REVISION);a=a||{};var O=void 0!==a.canvas?a.canvas:document.createElement("canvas"),S=void 0!==a.context?a.context:null,X=void 0!==a.precision?a.precision:"highp",Y=void 0!==a.alpha?a.alpha:!1,la=void 0!==a.depth?a.depth:!0,ma=void 0!==a.stencil? -a.stencil:!0,ya=void 0!==a.antialias?a.antialias:!1,P=void 0!==a.premultipliedAlpha?a.premultipliedAlpha:!0,Ga=void 0!==a.preserveDrawingBuffer?a.preserveDrawingBuffer:!1,Fa=void 0!==a.logarithmicDepthBuffer?a.logarithmicDepthBuffer:!1,za=new THREE.Color(0),bb=0,cb=[],ob={},jb=[],Jb=[],Ib=[],yb=[],Ra=[];this.domElement=O;this.context=null;this.devicePixelRatio=void 0!==a.devicePixelRatio?a.devicePixelRatio:void 0!==self.devicePixelRatio?self.devicePixelRatio:1;this.sortObjects=this.autoClearStencil= -this.autoClearDepth=this.autoClearColor=this.autoClear=!0;this.shadowMapEnabled=this.gammaOutput=this.gammaInput=!1;this.shadowMapType=THREE.PCFShadowMap;this.shadowMapCullFace=THREE.CullFaceFront;this.shadowMapCascade=this.shadowMapDebug=!1;this.maxMorphTargets=8;this.maxMorphNormals=4;this.autoScaleCubemaps=!0;this.info={memory:{programs:0,geometries:0,textures:0},render:{calls:0,vertices:0,faces:0,points:0}};var J=this,hb=[],tc=null,Tc=null,Kb=-1,Oa=-1,ec=null,dc=0,Lb=-1,Mb=-1,pb=-1,Nb=-1,Ob=-1, -Xb=-1,Yb=-1,nb=-1,Qc=null,Rc=null,Sc=null,Pc=null,Pb=0,kc=0,lc=O.width,mc=O.height,Uc=0,Vc=0,wb=new Uint8Array(16),ib=new Uint8Array(16),Ec=new THREE.Frustum,Ac=new THREE.Matrix4,Gc=new THREE.Matrix4,Na=new THREE.Vector3,sa=new THREE.Vector3,fc=!0,Mc={ambient:[0,0,0],directional:{length:0,colors:[],positions:[]},point:{length:0,colors:[],positions:[],distances:[]},spot:{length:0,colors:[],positions:[],distances:[],directions:[],anglesCos:[],exponents:[]},hemi:{length:0,skyColors:[],groundColors:[], -positions:[]}},l;try{var Wc={alpha:Y,depth:la,stencil:ma,antialias:ya,premultipliedAlpha:P,preserveDrawingBuffer:Ga};l=S||O.getContext("webgl",Wc)||O.getContext("experimental-webgl",Wc);if(null===l){if(null!==O.getContext("webgl"))throw"Error creating WebGL context with your selected attributes.";throw"Error creating WebGL context.";}}catch(ad){console.error(ad)}void 0===l.getShaderPrecisionFormat&&(l.getShaderPrecisionFormat=function(){return{rangeMin:1,rangeMax:1,precision:1}});var pa=new THREE.WebGLExtensions(l); -pa.get("OES_texture_float");pa.get("OES_texture_float_linear");pa.get("OES_standard_derivatives");Fa&&pa.get("EXT_frag_depth");l.clearColor(0,0,0,1);l.clearDepth(1);l.clearStencil(0);l.enable(l.DEPTH_TEST);l.depthFunc(l.LEQUAL);l.frontFace(l.CCW);l.cullFace(l.BACK);l.enable(l.CULL_FACE);l.enable(l.BLEND);l.blendEquation(l.FUNC_ADD);l.blendFunc(l.SRC_ALPHA,l.ONE_MINUS_SRC_ALPHA);l.viewport(Pb,kc,lc,mc);l.clearColor(za.r,za.g,za.b,bb);this.context=l;var Oc=l.getParameter(l.MAX_TEXTURE_IMAGE_UNITS), -bd=l.getParameter(l.MAX_VERTEX_TEXTURE_IMAGE_UNITS),cd=l.getParameter(l.MAX_TEXTURE_SIZE),$c=l.getParameter(l.MAX_CUBE_MAP_TEXTURE_SIZE),sc=0b;b++)l.deleteFramebuffer(a.__webglFramebuffer[b]),l.deleteRenderbuffer(a.__webglRenderbuffer[b]); -else l.deleteFramebuffer(a.__webglFramebuffer),l.deleteRenderbuffer(a.__webglRenderbuffer);delete a.__webglFramebuffer;delete 
a.__webglRenderbuffer}J.info.memory.textures--},Dc=function(a){a=a.target;a.removeEventListener("dispose",Dc);Cc(a)},Yc=function(a){for(var b="__webglVertexBuffer __webglNormalBuffer __webglTangentBuffer __webglColorBuffer __webglUVBuffer __webglUV2Buffer __webglSkinIndicesBuffer __webglSkinWeightsBuffer __webglFaceBuffer __webglLineBuffer __webglLineDistanceBuffer".split(" "), -c=0,d=b.length;cd.numSupportedMorphTargets?(n.sort(p),n.length=d.numSupportedMorphTargets):n.length>d.numSupportedMorphNormals?n.sort(p):0===n.length&&n.push([0, -0]);for(m=0;mf;f++){a.__webglFramebuffer[f]=l.createFramebuffer();a.__webglRenderbuffer[f]=l.createRenderbuffer();l.texImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X+f,0,d,a.width,a.height,0,d,e,null);var g=a,h=l.TEXTURE_CUBE_MAP_POSITIVE_X+f;l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer[f]);l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,h,g.__webglTexture,0);H(a.__webglRenderbuffer[f],a)}c&&l.generateMipmap(l.TEXTURE_CUBE_MAP)}else a.__webglFramebuffer= -l.createFramebuffer(),a.__webglRenderbuffer=a.shareDepthFrom?a.shareDepthFrom.__webglRenderbuffer:l.createRenderbuffer(),l.bindTexture(l.TEXTURE_2D,a.__webglTexture),F(l.TEXTURE_2D,a,c),l.texImage2D(l.TEXTURE_2D,0,d,a.width,a.height,0,d,e,null),d=l.TEXTURE_2D,l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer),l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,d,a.__webglTexture,0),a.shareDepthFrom?a.depthBuffer&&!a.stencilBuffer?l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT, -l.RENDERBUFFER,a.__webglRenderbuffer):a.depthBuffer&&a.stencilBuffer&&l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a.__webglRenderbuffer):H(a.__webglRenderbuffer,a),c&&l.generateMipmap(l.TEXTURE_2D);b?l.bindTexture(l.TEXTURE_CUBE_MAP,null):l.bindTexture(l.TEXTURE_2D,null);l.bindRenderbuffer(l.RENDERBUFFER,null);l.bindFramebuffer(l.FRAMEBUFFER,null)}a?(b=b?a.__webglFramebuffer[a.activeCubeFace]:a.__webglFramebuffer,c=a.width,a=a.height,e=d=0):(b=null,c=lc,a=mc, -d=Pb,e=kc);b!==Tc&&(l.bindFramebuffer(l.FRAMEBUFFER,b),l.viewport(d,e,c,a),Tc=b);Uc=c;Vc=a};this.initMaterial=function(){console.warn("THREE.WebGLRenderer: .initMaterial() has been removed.")};this.addPrePlugin=function(){console.warn("THREE.WebGLRenderer: .addPrePlugin() has been removed.")};this.addPostPlugin=function(){console.warn("THREE.WebGLRenderer: .addPostPlugin() has been removed.")};this.updateShadowMap=function(){console.warn("THREE.WebGLRenderer: .updateShadowMap() has been removed.")}}; -THREE.WebGLRenderTarget=function(a,b,c){this.width=a;this.height=b;c=c||{};this.wrapS=void 0!==c.wrapS?c.wrapS:THREE.ClampToEdgeWrapping;this.wrapT=void 0!==c.wrapT?c.wrapT:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==c.magFilter?c.magFilter:THREE.LinearFilter;this.minFilter=void 0!==c.minFilter?c.minFilter:THREE.LinearMipMapLinearFilter;this.anisotropy=void 0!==c.anisotropy?c.anisotropy:1;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.format=void 0!==c.format?c.format: -THREE.RGBAFormat;this.type=void 0!==c.type?c.type:THREE.UnsignedByteType;this.depthBuffer=void 0!==c.depthBuffer?c.depthBuffer:!0;this.stencilBuffer=void 0!==c.stencilBuffer?c.stencilBuffer:!0;this.generateMipmaps=!0;this.shareDepthFrom=null}; -THREE.WebGLRenderTarget.prototype={constructor:THREE.WebGLRenderTarget,setSize:function(a,b){this.width=a;this.height=b},clone:function(){var a=new 
THREE.WebGLRenderTarget(this.width,this.height);a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.offset.copy(this.offset);a.repeat.copy(this.repeat);a.format=this.format;a.type=this.type;a.depthBuffer=this.depthBuffer;a.stencilBuffer=this.stencilBuffer;a.generateMipmaps=this.generateMipmaps; -a.shareDepthFrom=this.shareDepthFrom;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube=function(a,b,c){THREE.WebGLRenderTarget.call(this,a,b,c);this.activeCubeFace=0};THREE.WebGLRenderTargetCube.prototype=Object.create(THREE.WebGLRenderTarget.prototype); -THREE.WebGLExtensions=function(a){var b={};this.get=function(c){if(void 0!==b[c])return b[c];var d;switch(c){case "OES_texture_float":d=a.getExtension("OES_texture_float");break;case "OES_texture_float_linear":d=a.getExtension("OES_texture_float_linear");break;case "OES_standard_derivatives":d=a.getExtension("OES_standard_derivatives");break;case "EXT_texture_filter_anisotropic":d=a.getExtension("EXT_texture_filter_anisotropic")||a.getExtension("MOZ_EXT_texture_filter_anisotropic")||a.getExtension("WEBKIT_EXT_texture_filter_anisotropic"); -break;case "WEBGL_compressed_texture_s3tc":d=a.getExtension("WEBGL_compressed_texture_s3tc")||a.getExtension("MOZ_WEBGL_compressed_texture_s3tc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc");break;case "WEBGL_compressed_texture_pvrtc":d=a.getExtension("WEBGL_compressed_texture_pvrtc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc");break;case "OES_element_index_uint":d=a.getExtension("OES_element_index_uint");break;case "EXT_blend_minmax":d=a.getExtension("EXT_blend_minmax");break; -case "EXT_frag_depth":d=a.getExtension("EXT_frag_depth")}null===d&&console.log("THREE.WebGLRenderer: "+c+" extension not supported.");return b[c]=d}}; -THREE.WebGLProgram=function(){var a=0;return function(b,c,d,e){var f=b.context,g=d.defines,h=d.__webglShader.uniforms,k=d.attributes,n=d.__webglShader.vertexShader,p=d.__webglShader.fragmentShader,q=d.index0AttributeName;void 0===q&&!0===e.morphTargets&&(q="position");var m="SHADOWMAP_TYPE_BASIC";e.shadowMapType===THREE.PCFShadowMap?m="SHADOWMAP_TYPE_PCF":e.shadowMapType===THREE.PCFSoftShadowMap&&(m="SHADOWMAP_TYPE_PCF_SOFT");var r,t;r=[];for(var s in g)t=g[s],!1!==t&&(t="#define "+s+" "+t,r.push(t)); -r=r.join("\n");g=f.createProgram();d instanceof THREE.RawShaderMaterial?b=d="":(d=["precision "+e.precision+" float;","precision "+e.precision+" int;",r,e.supportsVertexTextures?"#define VERTEX_TEXTURES":"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"","#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,"#define MAX_BONES "+ -e.maxBones,e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.skinning?"#define USE_SKINNING":"",e.useVertexTexture?"#define BONE_TEXTURE":"",e.morphTargets?"#define USE_MORPHTARGETS":"",e.morphNormals?"#define USE_MORPHNORMALS":"",e.wrapAround?"#define WRAP_AROUND": -"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define 
FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.sizeAttenuation?"#define USE_SIZEATTENUATION":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 modelMatrix;\nuniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat4 viewMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\nattribute vec3 position;\nattribute vec3 normal;\nattribute vec2 uv;\nattribute vec2 uv2;\n#ifdef USE_COLOR\n\tattribute vec3 color;\n#endif\n#ifdef USE_MORPHTARGETS\n\tattribute vec3 morphTarget0;\n\tattribute vec3 morphTarget1;\n\tattribute vec3 morphTarget2;\n\tattribute vec3 morphTarget3;\n\t#ifdef USE_MORPHNORMALS\n\t\tattribute vec3 morphNormal0;\n\t\tattribute vec3 morphNormal1;\n\t\tattribute vec3 morphNormal2;\n\t\tattribute vec3 morphNormal3;\n\t#else\n\t\tattribute vec3 morphTarget4;\n\t\tattribute vec3 morphTarget5;\n\t\tattribute vec3 morphTarget6;\n\t\tattribute vec3 morphTarget7;\n\t#endif\n#endif\n#ifdef USE_SKINNING\n\tattribute vec4 skinIndex;\n\tattribute vec4 skinWeight;\n#endif\n"].join("\n"), -b=["precision "+e.precision+" float;","precision "+e.precision+" int;",e.bumpMap||e.normalMap?"#extension GL_OES_standard_derivatives : enable":"",r,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,e.alphaTest?"#define ALPHATEST "+e.alphaTest:"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"",e.useFog&&e.fog?"#define USE_FOG": -"",e.useFog&&e.fogExp?"#define FOG_EXP2":"",e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.metal?"#define METAL":"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP": -"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 viewMatrix;\nuniform vec3 cameraPosition;\n"].join("\n"));n=new THREE.WebGLShader(f,f.VERTEX_SHADER,d+n);p=new THREE.WebGLShader(f,f.FRAGMENT_SHADER,b+p);f.attachShader(g,n);f.attachShader(g,p);void 0!==q&&f.bindAttribLocation(g,0,q);f.linkProgram(g);!1===f.getProgramParameter(g,f.LINK_STATUS)&&(console.error("THREE.WebGLProgram: Could not initialise shader."), -console.error("gl.VALIDATE_STATUS",f.getProgramParameter(g,f.VALIDATE_STATUS)),console.error("gl.getError()",f.getError()));""!==f.getProgramInfoLog(g)&&console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",f.getProgramInfoLog(g));f.deleteShader(n);f.deleteShader(p);q="viewMatrix modelViewMatrix projectionMatrix normalMatrix modelMatrix cameraPosition morphTargetInfluences bindMatrix bindMatrixInverse".split(" ");e.useVertexTexture?(q.push("boneTexture"),q.push("boneTextureWidth"),q.push("boneTextureHeight")): -q.push("boneGlobalMatrices");e.logarithmicDepthBuffer&&q.push("logDepthBufFC");for(var u in h)q.push(u);h=q;u={};q=0;for(b=h.length;qT;T++)F[T]=new THREE.Vector3,A[T]=new 
THREE.Vector3;F=B.shadowCascadeNearZ[C];B=B.shadowCascadeFarZ[C];A[0].set(-1,-1,F);A[1].set(1,-1,F);A[2].set(-1,1,F);A[3].set(1,1,F);A[4].set(-1,-1,B);A[5].set(1,-1,B);A[6].set(-1,1,B);A[7].set(1,1,B);H.originalCamera=v;A=new THREE.Gyroscope;A.position.copy(x.shadowCascadeOffset);A.add(H);A.add(H.target);v.add(A);x.shadowCascadeArray[E]=H;console.log("Created virtualLight",H)}C= -x;F=E;B=C.shadowCascadeArray[F];B.position.copy(C.position);B.target.position.copy(C.target.position);B.lookAt(B.target);B.shadowCameraVisible=C.shadowCameraVisible;B.shadowDarkness=C.shadowDarkness;B.shadowBias=C.shadowCascadeBias[F];A=C.shadowCascadeNearZ[F];C=C.shadowCascadeFarZ[F];B=B.pointsFrustum;B[0].z=A;B[1].z=A;B[2].z=A;B[3].z=A;B[4].z=C;B[5].z=C;B[6].z=C;B[7].z=C;R[D]=H;D++}else R[D]=x,D++;u=0;for(K=R.length;uC;C++)F=B[C],F.copy(A[C]),F.unproject(E),F.applyMatrix4(D.matrixWorldInverse),F.xr.x&&(r.x=F.x),F.yr.y&&(r.y=F.y),F.zr.z&&(r.z=F.z);D.left=m.x;D.right=r.x;D.top=r.y;D.bottom=m.y;D.updateProjectionMatrix()}D=x.shadowMap;A=x.shadowMatrix;E=x.shadowCamera;E.position.setFromMatrixPosition(x.matrixWorld);t.setFromMatrixPosition(x.target.matrixWorld);E.lookAt(t);E.updateMatrixWorld();E.matrixWorldInverse.getInverse(E.matrixWorld);x.cameraHelper&& -(x.cameraHelper.visible=x.shadowCameraVisible);x.shadowCameraVisible&&x.cameraHelper.update();A.set(.5,0,0,.5,0,.5,0,.5,0,0,.5,.5,0,0,0,1);A.multiply(E.projectionMatrix);A.multiply(E.matrixWorldInverse);q.multiplyMatrices(E.projectionMatrix,E.matrixWorldInverse);p.setFromMatrix(q);a.setRenderTarget(D);a.clear();s.length=0;e(c,c,E);x=0;for(D=s.length;x 0 ) {\nfloat depth = gl_FragCoord.z / gl_FragCoord.w;\nfloat fogFactor = 0.0;\nif ( fogType == 1 ) {\nfogFactor = smoothstep( fogNear, fogFar, depth );\n} else {\nconst float LOG2 = 1.442695;\nfloat fogFactor = exp2( - fogDensity * fogDensity * depth * depth * LOG2 );\nfogFactor = 1.0 - clamp( fogFactor, 0.0, 1.0 );\n}\ngl_FragColor = mix( gl_FragColor, vec4( fogColor, gl_FragColor.w ), fogFactor );\n}\n}"].join("\n")); -w.compileShader(R);w.compileShader(H);w.attachShader(F,R);w.attachShader(F,H);w.linkProgram(F);D=F;v=w.getAttribLocation(D,"position");y=w.getAttribLocation(D,"uv");c=w.getUniformLocation(D,"uvOffset");d=w.getUniformLocation(D,"uvScale");e=w.getUniformLocation(D,"rotation");f=w.getUniformLocation(D,"scale");g=w.getUniformLocation(D,"color");h=w.getUniformLocation(D,"map");k=w.getUniformLocation(D,"opacity");n=w.getUniformLocation(D,"modelViewMatrix");p=w.getUniformLocation(D,"projectionMatrix");q= -w.getUniformLocation(D,"fogType");m=w.getUniformLocation(D,"fogDensity");r=w.getUniformLocation(D,"fogNear");t=w.getUniformLocation(D,"fogFar");s=w.getUniformLocation(D,"fogColor");u=w.getUniformLocation(D,"alphaTest");F=document.createElement("canvas");F.width=8;F.height=8;R=F.getContext("2d");R.fillStyle="white";R.fillRect(0,0,8,8);E=new THREE.Texture(F);E.needsUpdate=!0}w.useProgram(D);w.enableVertexAttribArray(v);w.enableVertexAttribArray(y);w.disable(w.CULL_FACE);w.enable(w.BLEND);w.bindBuffer(w.ARRAY_BUFFER, -K);w.vertexAttribPointer(v,2,w.FLOAT,!1,16,0);w.vertexAttribPointer(y,2,w.FLOAT,!1,16,8);w.bindBuffer(w.ELEMENT_ARRAY_BUFFER,x);w.uniformMatrix4fv(p,!1,B.projectionMatrix.elements);w.activeTexture(w.TEXTURE0);w.uniform1i(h,0);R=F=0;(H=A.fog)?(w.uniform3f(s,H.color.r,H.color.g,H.color.b),H instanceof THREE.Fog?(w.uniform1f(r,H.near),w.uniform1f(t,H.far),w.uniform1i(q,1),R=F=1):H instanceof THREE.FogExp2&&(w.uniform1f(m,H.density),w.uniform1i(q,2),R=F=2)):(w.uniform1i(q,0),R=F=0);for(var 
H=0,C=b.length;H< -C;H++){var T=b[H];T._modelViewMatrix.multiplyMatrices(B.matrixWorldInverse,T.matrixWorld);T.z=null===T.renderDepth?-T._modelViewMatrix.elements[14]:T.renderDepth}b.sort(G);for(var Q=[],H=0,C=b.length;Hq-1?0:q-1,r=q+1>e-1?e-1:q+1,t=0>p-1?0:p-1,s=p+1>d-1?d-1:p+1,u=[],v=[0,0,h[4*(q*d+p)]/255*b];u.push([-1,0,h[4*(q*d+t)]/255*b]);u.push([-1,-1,h[4*(m*d+t)]/255*b]);u.push([0,-1,h[4*(m*d+p)]/255*b]);u.push([1,-1,h[4*(m*d+s)]/255*b]);u.push([1,0,h[4*(q*d+s)]/255*b]);u.push([1,1,h[4*(r*d+s)]/255*b]);u.push([0,1,h[4*(r*d+p)]/255* -b]);u.push([-1,1,h[4*(r*d+t)]/255*b]);m=[];t=u.length;for(r=0;re)return null;var f=[],g=[],h=[],k,n,p;if(0=q--){console.log("Warning, unable to triangulate polygon!");break}k=n;e<=k&&(k=0);n=k+1;e<=n&&(n=0);p=n+1;e<=p&&(p=0);var m;a:{var r=m=void 0,t=void 0,s=void 0,u=void 0,v=void 0,y=void 0,G=void 0,w=void 0, -r=a[g[k]].x,t=a[g[k]].y,s=a[g[n]].x,u=a[g[n]].y,v=a[g[p]].x,y=a[g[p]].y;if(1E-10>(s-r)*(y-t)-(u-t)*(v-r))m=!1;else{var K=void 0,x=void 0,D=void 0,E=void 0,A=void 0,B=void 0,F=void 0,R=void 0,H=void 0,C=void 0,H=R=F=w=G=void 0,K=v-s,x=y-u,D=r-v,E=t-y,A=s-r,B=u-t;for(m=0;mk)g=d+1;else if(0b&&(b=0);1=b)return b=c[a]-b,a=this.curves[a],b=1-b/a.getLength(),a.getPointAt(b);a++}return null};THREE.CurvePath.prototype.getLength=function(){var a=this.getCurveLengths();return a[a.length-1]}; -THREE.CurvePath.prototype.getCurveLengths=function(){if(this.cacheLengths&&this.cacheLengths.length==this.curves.length)return this.cacheLengths;var a=[],b=0,c,d=this.curves.length;for(c=0;cb?b=h.x:h.xc?c=h.y:h.yd?d=h.z:h.zMath.abs(d.x-c[0].x)&&1E-10>Math.abs(d.y-c[0].y)&&c.splice(c.length-1,1);b&&c.push(c[0]);return c}; -THREE.Path.prototype.toShapes=function(a,b){function c(a){for(var b=[],c=0,d=a.length;cm&&(g=b[f],k=-k,h=b[e],m=-m),!(a.yh.y))if(a.y==g.y){if(a.x==g.x)return!0}else{e=m*(a.x-g.x)-k*(a.y-g.y);if(0==e)return!0;0>e||(d=!d)}}else if(a.y==g.y&&(h.x<=a.x&&a.x<=g.x||g.x<=a.x&&a.x<= -h.x))return!0}return d}var e=function(a){var b,c,d,e,f=[],g=new THREE.Path;b=0;for(c=a.length;bE||E>D)return[];k=n*p-k*q;if(0>k||k>D)return[]}else{if(0d?[]:k==d?f?[]:[g]:a<=d?[g,h]: -[g,n]}function e(a,b,c,d){var e=b.x-a.x,f=b.y-a.y;b=c.x-a.x;c=c.y-a.y;var g=d.x-a.x;d=d.y-a.y;a=e*c-f*b;e=e*d-f*g;return 1E-10f&&(f=d);var g=a+1;g>d&&(g=0);d=e(h[a],h[f],h[g],k[b]);if(!d)return!1; -d=k.length-1;f=b-1;0>f&&(f=d);g=b+1;g>d&&(g=0);return(d=e(k[b],k[f],k[g],h[a]))?!0:!1}function f(a,b){var c,e;for(c=0;cC){console.log("Infinite Loop! 
Holes left:"+ -n.length+", Probably Hole outside Shape!");break}for(q=B;qh;h++)n=k[h].x+":"+k[h].y, -n=p[n],void 0!==n&&(k[h]=n);return q.concat()},isClockWise:function(a){return 0>THREE.FontUtils.Triangulate.area(a)},b2p0:function(a,b){var c=1-a;return c*c*b},b2p1:function(a,b){return 2*(1-a)*a*b},b2p2:function(a,b){return a*a*b},b2:function(a,b,c,d){return this.b2p0(a,b)+this.b2p1(a,c)+this.b2p2(a,d)},b3p0:function(a,b){var c=1-a;return c*c*c*b},b3p1:function(a,b){var c=1-a;return 3*c*c*a*b},b3p2:function(a,b){return 3*(1-a)*a*a*b},b3p3:function(a,b){return a*a*a*b},b3:function(a,b,c,d,e){return this.b3p0(a, -b)+this.b3p1(a,c)+this.b3p2(a,d)+this.b3p3(a,e)}};THREE.LineCurve=function(a,b){this.v1=a;this.v2=b};THREE.LineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.LineCurve.prototype.getPoint=function(a){var b=this.v2.clone().sub(this.v1);b.multiplyScalar(a).add(this.v1);return b};THREE.LineCurve.prototype.getPointAt=function(a){return this.getPoint(a)};THREE.LineCurve.prototype.getTangent=function(a){return this.v2.clone().sub(this.v1).normalize()}; -THREE.QuadraticBezierCurve=function(a,b,c){this.v0=a;this.v1=b;this.v2=c};THREE.QuadraticBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.QuadraticBezierCurve.prototype.getPoint=function(a){var b=new THREE.Vector2;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);return b}; -THREE.QuadraticBezierCurve.prototype.getTangent=function(a){var b=new THREE.Vector2;b.x=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.y,this.v1.y,this.v2.y);return b.normalize()};THREE.CubicBezierCurve=function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d};THREE.CubicBezierCurve.prototype=Object.create(THREE.Curve.prototype); -THREE.CubicBezierCurve.prototype.getPoint=function(a){var b;b=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);return new THREE.Vector2(b,a)};THREE.CubicBezierCurve.prototype.getTangent=function(a){var b;b=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b=new THREE.Vector2(b,a);b.normalize();return b}; -THREE.SplineCurve=function(a){this.points=void 0==a?[]:a};THREE.SplineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.SplineCurve.prototype.getPoint=function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector2;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);return c}; -THREE.EllipseCurve=function(a,b,c,d,e,f,g){this.aX=a;this.aY=b;this.xRadius=c;this.yRadius=d;this.aStartAngle=e;this.aEndAngle=f;this.aClockwise=g};THREE.EllipseCurve.prototype=Object.create(THREE.Curve.prototype); -THREE.EllipseCurve.prototype.getPoint=function(a){var b=this.aEndAngle-this.aStartAngle;0>b&&(b+=2*Math.PI);b>2*Math.PI&&(b-=2*Math.PI);a=!0===this.aClockwise?this.aEndAngle+(1-a)*(2*Math.PI-b):this.aStartAngle+a*b;b=new THREE.Vector2;b.x=this.aX+this.xRadius*Math.cos(a);b.y=this.aY+this.yRadius*Math.sin(a);return b};THREE.ArcCurve=function(a,b,c,d,e,f){THREE.EllipseCurve.call(this,a,b,c,c,d,e,f)};THREE.ArcCurve.prototype=Object.create(THREE.EllipseCurve.prototype); 
-THREE.LineCurve3=THREE.Curve.create(function(a,b){this.v1=a;this.v2=b},function(a){var b=new THREE.Vector3;b.subVectors(this.v2,this.v1);b.multiplyScalar(a);b.add(this.v1);return b});THREE.QuadraticBezierCurve3=THREE.Curve.create(function(a,b,c){this.v0=a;this.v1=b;this.v2=c},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);b.z=THREE.Shape.Utils.b2(a,this.v0.z,this.v1.z,this.v2.z);return b}); -THREE.CubicBezierCurve3=THREE.Curve.create(function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);b.y=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b.z=THREE.Shape.Utils.b3(a,this.v0.z,this.v1.z,this.v2.z,this.v3.z);return b}); -THREE.SplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c}); -THREE.ClosedSplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-0;var c=Math.floor(a);a-=c;var c=c+(0a.hierarchy[b].keys[c].time&&(a.hierarchy[b].keys[c].time= -0),void 0!==a.hierarchy[b].keys[c].rot&&!(a.hierarchy[b].keys[c].rot instanceof THREE.Quaternion)){var d=a.hierarchy[b].keys[c].rot;a.hierarchy[b].keys[c].rot=(new THREE.Quaternion).fromArray(d)}if(a.hierarchy[b].keys.length&&void 0!==a.hierarchy[b].keys[0].morphTargets){d={};for(c=0;cd;d++){for(var e=this.keyTypes[d],f=this.data.hierarchy[a].keys[0],g=this.getNextKeyWith(e,a,1);g.timef.index;)f=g,g=this.getNextKeyWith(e,a,g.index+1);c.prevKey[e]=f;c.nextKey[e]=g}}}; -THREE.Animation.prototype.resetBlendWeights=function(){for(var a=0,b=this.hierarchy.length;aa.length-2?q:q+1;c[3]=q>a.length-3?q:q+2;q=a[c[0]];r=a[c[1]];t=a[c[2]];s=a[c[3]];c=e*e;m=e*c;d[0]=f(q[0],r[0],t[0],s[0],e,c,m);d[1]=f(q[1],r[1],t[1],s[1],e,c,m);d[2]=f(q[2],r[2],t[2],s[2],e,c,m);return d},f=function(a,b,c,d,e,f,m){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*m+ -(-3*(b-c)-2*a-d)*f+a*e+b};return function(f){if(!1!==this.isPlaying&&(this.currentTime+=f*this.timeScale,0!==this.weight)){f=this.data.length;if(this.currentTime>f||0>this.currentTime)if(this.loop)this.currentTime%=f,0>this.currentTime&&(this.currentTime+=f),this.reset();else{this.stop();return}f=0;for(var h=this.hierarchy.length;fq;q++){var m=this.keyTypes[q],r=n.prevKey[m],t=n.nextKey[m]; -if(0this.timeScale&&r.time>=this.currentTime){r=this.data.hierarchy[f].keys[0];for(t=this.getNextKeyWith(m,f,1);t.timer.index;)r=t,t=this.getNextKeyWith(m,f,t.index+1);n.prevKey[m]=r;n.nextKey[m]=t}k.matrixAutoUpdate=!0;k.matrixWorldNeedsUpdate=!0;var s=(this.currentTime-r.time)/(t.time-r.time),u=r[m],v=t[m];0>s&&(s=0);1a&&(this.currentTime%=a);this.currentTime=Math.min(this.currentTime,a);a=0;for(var b=this.hierarchy.length;af.index;)f=g,g=e[f.index+1];d.prevKey= 
-f;d.nextKey=g}g.time>=this.currentTime?f.interpolate(g,this.currentTime):f.interpolate(g,g.time);this.data.hierarchy[a].node.updateMatrix();c.matrixWorldNeedsUpdate=!0}}}};THREE.KeyFrameAnimation.prototype.getNextKeyWith=function(a,b,c){b=this.data.hierarchy[b].keys;for(c%=b.length;cthis.duration&&(this.currentTime%=this.duration);this.currentTime=Math.min(this.currentTime,this.duration);c=this.duration/this.frames;var d=Math.floor(this.currentTime/c);d!=b&&(this.mesh.morphTargetInfluences[a]=0,this.mesh.morphTargetInfluences[b]=1,this.mesh.morphTargetInfluences[d]= -0,a=b,b=d);this.mesh.morphTargetInfluences[d]=this.currentTime%c/c;this.mesh.morphTargetInfluences[a]=1-this.mesh.morphTargetInfluences[d]}}}()}; -THREE.BoxGeometry=function(a,b,c,d,e,f){function g(a,b,c,d,e,f,g,s){var u,v=h.widthSegments,y=h.heightSegments,G=e/2,w=f/2,K=h.vertices.length;if("x"===a&&"y"===b||"y"===a&&"x"===b)u="z";else if("x"===a&&"z"===b||"z"===a&&"x"===b)u="y",y=h.depthSegments;else if("z"===a&&"y"===b||"y"===a&&"z"===b)u="x",v=h.depthSegments;var x=v+1,D=y+1,E=e/v,A=f/y,B=new THREE.Vector3;B[u]=0=d)return new THREE.Vector2(c,a);d=Math.sqrt(d/2)}else a=!1,1E-10d?-1E-10>f&&(a=!0):Math.sign(e)== -Math.sign(g)&&(a=!0),a?(c=-e,a=d,d=Math.sqrt(h)):(c=d,a=e,d=Math.sqrt(h/2));return new THREE.Vector2(c/d,a/d)}function e(a,b){var c,d;for(P=a.length;0<=--P;){c=P;d=P-1;0>d&&(d=a.length-1);for(var e=0,f=r+2*p,e=0;eMath.abs(b.y-c.y)?[new THREE.Vector2(b.x,1-b.z),new THREE.Vector2(c.x,1-c.z),new THREE.Vector2(d.x,1-d.z),new THREE.Vector2(e.x,1-e.z)]:[new THREE.Vector2(b.y,1-b.z),new THREE.Vector2(c.y,1-c.z),new THREE.Vector2(d.y, -1-d.z),new THREE.Vector2(e.y,1-e.z)]}};THREE.ShapeGeometry=function(a,b){THREE.Geometry.call(this);this.type="ShapeGeometry";!1===a instanceof Array&&(a=[a]);this.addShapeList(a,b);this.computeFaceNormals()};THREE.ShapeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ShapeGeometry.prototype.addShapeList=function(a,b){for(var c=0,d=a.length;cc&&1===a.x&&(a=new THREE.Vector2(a.x-1,a.y));0===b.x&&0===b.z&&(a=new THREE.Vector2(c/2/Math.PI+.5, -a.y));return a.clone()}THREE.Geometry.call(this);this.type="PolyhedronGeometry";this.parameters={vertices:a,indices:b,radius:c,detail:d};c=c||1;d=d||0;for(var k=this,n=0,p=a.length;nr&&(.2>d&&(b[0].x+=1),.2>a&&(b[1].x+=1),.2>q&&(b[2].x+=1));n=0;for(p=this.vertices.length;nc.y?this.quaternion.set(1,0,0,0):(a.set(c.z,0,-c.x).normalize(),b=Math.acos(c.y),this.quaternion.setFromAxisAngle(a,b))}}(); -THREE.ArrowHelper.prototype.setLength=function(a,b,c){void 0===b&&(b=.2*a);void 0===c&&(c=.2*b);this.line.scale.set(1,a,1);this.line.updateMatrix();this.cone.scale.set(c,b,c);this.cone.position.y=a;this.cone.updateMatrix()};THREE.ArrowHelper.prototype.setColor=function(a){this.line.material.color.set(a);this.cone.material.color.set(a)}; -THREE.BoxHelper=function(a){var b=new THREE.BufferGeometry;b.addAttribute("position",new THREE.BufferAttribute(new Float32Array(72),3));THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:16776960}),THREE.LinePieces);void 0!==a&&this.update(a)};THREE.BoxHelper.prototype=Object.create(THREE.Line.prototype); -THREE.BoxHelper.prototype.update=function(a){var b=a.geometry;null===b.boundingBox&&b.computeBoundingBox();var 
c=b.boundingBox.min,b=b.boundingBox.max,d=this.geometry.attributes.position.array;d[0]=b.x;d[1]=b.y;d[2]=b.z;d[3]=c.x;d[4]=b.y;d[5]=b.z;d[6]=c.x;d[7]=b.y;d[8]=b.z;d[9]=c.x;d[10]=c.y;d[11]=b.z;d[12]=c.x;d[13]=c.y;d[14]=b.z;d[15]=b.x;d[16]=c.y;d[17]=b.z;d[18]=b.x;d[19]=c.y;d[20]=b.z;d[21]=b.x;d[22]=b.y;d[23]=b.z;d[24]=b.x;d[25]=b.y;d[26]=c.z;d[27]=c.x;d[28]=b.y;d[29]=c.z;d[30]=c.x;d[31]=b.y; -d[32]=c.z;d[33]=c.x;d[34]=c.y;d[35]=c.z;d[36]=c.x;d[37]=c.y;d[38]=c.z;d[39]=b.x;d[40]=c.y;d[41]=c.z;d[42]=b.x;d[43]=c.y;d[44]=c.z;d[45]=b.x;d[46]=b.y;d[47]=c.z;d[48]=b.x;d[49]=b.y;d[50]=b.z;d[51]=b.x;d[52]=b.y;d[53]=c.z;d[54]=c.x;d[55]=b.y;d[56]=b.z;d[57]=c.x;d[58]=b.y;d[59]=c.z;d[60]=c.x;d[61]=c.y;d[62]=b.z;d[63]=c.x;d[64]=c.y;d[65]=c.z;d[66]=b.x;d[67]=c.y;d[68]=b.z;d[69]=b.x;d[70]=c.y;d[71]=c.z;this.geometry.attributes.position.needsUpdate=!0;this.geometry.computeBoundingSphere();this.matrix=a.matrixWorld; -this.matrixAutoUpdate=!1};THREE.BoundingBoxHelper=function(a,b){var c=void 0!==b?b:8947848;this.object=a;this.box=new THREE.Box3;THREE.Mesh.call(this,new THREE.BoxGeometry(1,1,1),new THREE.MeshBasicMaterial({color:c,wireframe:!0}))};THREE.BoundingBoxHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.BoundingBoxHelper.prototype.update=function(){this.box.setFromObject(this.object);this.box.size(this.scale);this.box.center(this.position)}; -THREE.CameraHelper=function(a){function b(a,b,d){c(a,d);c(b,d)}function c(a,b){d.vertices.push(new THREE.Vector3);d.colors.push(new THREE.Color(b));void 0===f[a]&&(f[a]=[]);f[a].push(d.vertices.length-1)}var d=new THREE.Geometry,e=new THREE.LineBasicMaterial({color:16777215,vertexColors:THREE.FaceColors}),f={};b("n1","n2",16755200);b("n2","n4",16755200);b("n4","n3",16755200);b("n3","n1",16755200);b("f1","f2",16755200);b("f2","f4",16755200);b("f4","f3",16755200);b("f3","f1",16755200);b("n1","f1",16755200); -b("n2","f2",16755200);b("n3","f3",16755200);b("n4","f4",16755200);b("p","n1",16711680);b("p","n2",16711680);b("p","n3",16711680);b("p","n4",16711680);b("u1","u2",43775);b("u2","u3",43775);b("u3","u1",43775);b("c","t",16777215);b("p","c",3355443);b("cn1","cn2",3355443);b("cn3","cn4",3355443);b("cf1","cf2",3355443);b("cf3","cf4",3355443);THREE.Line.call(this,d,e,THREE.LinePieces);this.camera=a;this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.pointMap=f;this.update()}; -THREE.CameraHelper.prototype=Object.create(THREE.Line.prototype); -THREE.CameraHelper.prototype.update=function(){var a,b,c=new THREE.Vector3,d=new THREE.Camera,e=function(e,g,h,k){c.set(g,h,k).unproject(d);e=b[e];if(void 0!==e)for(g=0,h=e.length;gt;t++){d[0]=r[g[t]];d[1]=r[g[(t+1)%3]];d.sort(f);var s=d.toString();void 0===e[s]?(e[s]={vert1:d[0],vert2:d[1],face1:q,face2:void 0},p++):e[s].face2=q}d=new Float32Array(6*p);f=0;for(s in e)if(g=e[s],void 0===g.face2|| -.9999>k[g.face1].normal.dot(k[g.face2].normal))p=n[g.vert1],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z,p=n[g.vert2],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3));THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.EdgesHelper.prototype=Object.create(THREE.Line.prototype); -THREE.FaceNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;a=void 0!==c?c:16776960;d=void 0!==d?d:1;b=new THREE.Geometry;c=0;for(var e=this.object.geometry.faces.length;cb;b++)a.faces[b].color=this.colors[4>b?0:1];b=new THREE.MeshBasicMaterial({vertexColors:THREE.FaceColors,wireframe:!0});this.lightSphere=new 
THREE.Mesh(a,b);this.add(this.lightSphere); -this.update()};THREE.HemisphereLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.HemisphereLightHelper.prototype.dispose=function(){this.lightSphere.geometry.dispose();this.lightSphere.material.dispose()}; -THREE.HemisphereLightHelper.prototype.update=function(){var a=new THREE.Vector3;return function(){this.colors[0].copy(this.light.color).multiplyScalar(this.light.intensity);this.colors[1].copy(this.light.groundColor).multiplyScalar(this.light.intensity);this.lightSphere.lookAt(a.setFromMatrixPosition(this.light.matrixWorld).negate());this.lightSphere.geometry.colorsNeedUpdate=!0}}(); -THREE.PointLightHelper=function(a,b){this.light=a;this.light.updateMatrixWorld();var c=new THREE.SphereGeometry(b,4,2),d=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);THREE.Mesh.call(this,c,d);this.matrix=this.light.matrixWorld;this.matrixAutoUpdate=!1};THREE.PointLightHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.PointLightHelper.prototype.dispose=function(){this.geometry.dispose();this.material.dispose()}; -THREE.PointLightHelper.prototype.update=function(){this.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)}; -THREE.SkeletonHelper=function(a){this.bones=this.getBoneList(a);for(var b=new THREE.Geometry,c=0;cs;s++){d[0]=t[g[s]];d[1]=t[g[(s+1)%3]];d.sort(f);var u=d.toString();void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++)}d=new Float32Array(6*p);m=0;for(r=p;ms;s++)p= -k[q[2*m+s]],g=6*m+3*s,d[g+0]=p.x,d[g+1]=p.y,d[g+2]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3))}else if(a.geometry instanceof THREE.BufferGeometry){if(void 0!==a.geometry.attributes.index){k=a.geometry.attributes.position.array;r=a.geometry.attributes.index.array;n=a.geometry.drawcalls;p=0;0===n.length&&(n=[{count:r.length,index:0,start:0}]);for(var q=new Uint32Array(2*r.length),t=0,v=n.length;ts;s++)d[0]= -g+r[m+s],d[1]=g+r[m+(s+1)%3],d.sort(f),u=d.toString(),void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++);d=new Float32Array(6*p);m=0;for(r=p;ms;s++)g=6*m+3*s,p=3*q[2*m+s],d[g+0]=k[p],d[g+1]=k[p+1],d[g+2]=k[p+2]}else for(k=a.geometry.attributes.position.array,p=k.length/3,q=p/3,d=new Float32Array(6*p),m=0,r=q;ms;s++)g=18*m+6*s,q=9*m+3*s,d[g+0]=k[q],d[g+1]=k[q+1],d[g+2]=k[q+2],p=9*m+(s+1)%3*3,d[g+3]=k[p],d[g+4]=k[p+1],d[g+5]=k[p+2];h.addAttribute("position",new THREE.BufferAttribute(d, -3))}THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.WireframeHelper.prototype=Object.create(THREE.Line.prototype);THREE.ImmediateRenderObject=function(){THREE.Object3D.call(this);this.render=function(a){}};THREE.ImmediateRenderObject.prototype=Object.create(THREE.Object3D.prototype); -THREE.MorphBlendMesh=function(a,b){THREE.Mesh.call(this,a,b);this.animationsMap={};this.animationsList=[];var c=this.geometry.morphTargets.length;this.createAnimation("__default",0,c-1,c/1);this.setAnimationWeight("__default",1)};THREE.MorphBlendMesh.prototype=Object.create(THREE.Mesh.prototype); -THREE.MorphBlendMesh.prototype.createAnimation=function(a,b,c,d){b={startFrame:b,endFrame:c,length:c-b+1,fps:d,duration:(c-b)/d,lastFrame:0,currentFrame:0,active:!1,time:0,direction:1,weight:1,directionBackwards:!1,mirroredLoop:!1};this.animationsMap[a]=b;this.animationsList.push(b)}; -THREE.MorphBlendMesh.prototype.autoCreateAnimations=function(a){for(var 
b=/([a-z]+)_?(\d+)/,c,d={},e=this.geometry,f=0,g=e.morphTargets.length;fh.end&&(h.end=f);c||(c=k)}}for(k in d)h=d[k],this.createAnimation(k,h.start,h.end,a);this.firstAnimation=c}; -THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=this.animationsMap[a])a.direction=1,a.directionBackwards=!1};THREE.MorphBlendMesh.prototype.setAnimationDirectionBackward=function(a){if(a=this.animationsMap[a])a.direction=-1,a.directionBackwards=!0};THREE.MorphBlendMesh.prototype.setAnimationFPS=function(a,b){var c=this.animationsMap[a];c&&(c.fps=b,c.duration=(c.end-c.start)/c.fps)}; -THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b}; -THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):console.warn("animation["+a+"] undefined")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1}; -THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;bd.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g=d.weight; -f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; diff --git a/plugins/Sidebar/media-globe/globe.js b/plugins/Sidebar/media-globe/globe.js deleted file mode 100644 index a5523796..00000000 --- a/plugins/Sidebar/media-globe/globe.js +++ /dev/null @@ -1,436 +0,0 @@ -/** - * dat.globe Javascript WebGL Globe Toolkit - * http://dataarts.github.com/dat.globe - * - * Copyright 2011 Data Arts Team, Google Creative Lab - * - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - */ - -var DAT = DAT || {}; - -DAT.Globe = function(container, opts) { - opts = opts || {}; - - var colorFn = opts.colorFn || function(x) { - var c = new THREE.Color(); - c.setHSL( ( 0.5 - (x * 2) ), Math.max(0.8, 1.0 - (x * 3)), 0.5 ); - return c; - }; - var imgDir = opts.imgDir || '/globe/'; - - var Shaders = { - 'earth' : { - uniforms: { - 'texture': { type: 't', value: null } - }, - vertexShader: [ - 'varying vec3 vNormal;', - 'varying vec2 vUv;', - 'void main() {', - 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', - 'vNormal = normalize( normalMatrix * normal );', - 'vUv = uv;', - '}' - ].join('\n'), - fragmentShader: [ - 'uniform sampler2D texture;', - 'varying vec3 vNormal;', - 'varying vec2 vUv;', - 'void main() {', - 'vec3 diffuse = texture2D( texture, vUv ).xyz;', - 'float intensity = 1.05 - dot( vNormal, vec3( 0.0, 0.0, 1.0 ) );', - 'vec3 atmosphere = vec3( 1.0, 1.0, 1.0 ) * pow( intensity, 3.0 );', - 'gl_FragColor = vec4( diffuse + atmosphere, 1.0 );', - '}' - ].join('\n') - }, - 'atmosphere' : { - uniforms: {}, - vertexShader: [ - 'varying vec3 vNormal;', - 'void main() {', - 'vNormal = normalize( normalMatrix * normal );', - 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', - '}' - ].join('\n'), - fragmentShader: [ - 'varying vec3 vNormal;', - 'void main() {', - 'float intensity = pow( 0.8 - dot( vNormal, vec3( 0, 0, 1.0 ) ), 12.0 );', - 'gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0 ) * intensity;', - '}' - ].join('\n') - } - }; - - var camera, scene, renderer, w, h; - var mesh, atmosphere, point, running; - - var overRenderer; - var running = true; - - var curZoomSpeed = 0; - var zoomSpeed = 50; - - var mouse = { x: 0, y: 0 }, mouseOnDown = { x: 0, y: 0 }; - var rotation = { x: 0, y: 0 }, - target = { x: Math.PI*3/2, y: Math.PI / 6.0 }, - targetOnDown = { x: 0, y: 0 }; - - var distance = 100000, distanceTarget = 100000; - var padding = 10; - var PI_HALF = Math.PI / 2; - - function init() { - - container.style.color = '#fff'; - container.style.font = '13px/20px Arial, sans-serif'; - - var shader, uniforms, material; - w = container.offsetWidth || window.innerWidth; - h = container.offsetHeight || window.innerHeight; - - camera = new THREE.PerspectiveCamera(30, w / h, 1, 10000); - camera.position.z = distance; - - scene = new THREE.Scene(); - - var geometry = new THREE.SphereGeometry(200, 40, 30); - - shader = Shaders['earth']; - uniforms = THREE.UniformsUtils.clone(shader.uniforms); - - uniforms['texture'].value = THREE.ImageUtils.loadTexture(imgDir+'world.jpg'); - - material = new THREE.ShaderMaterial({ - - uniforms: uniforms, - vertexShader: shader.vertexShader, - fragmentShader: shader.fragmentShader - - }); - - mesh = new THREE.Mesh(geometry, material); - mesh.rotation.y = Math.PI; - scene.add(mesh); - - shader = Shaders['atmosphere']; - uniforms = THREE.UniformsUtils.clone(shader.uniforms); - - material = new THREE.ShaderMaterial({ - - uniforms: uniforms, - vertexShader: shader.vertexShader, - fragmentShader: shader.fragmentShader, - side: THREE.BackSide, - blending: THREE.AdditiveBlending, - transparent: true - - }); - - mesh = new THREE.Mesh(geometry, material); - mesh.scale.set( 1.1, 1.1, 1.1 ); - scene.add(mesh); - - geometry = new THREE.BoxGeometry(2.75, 2.75, 1); - geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0,0,-0.5)); - - point = new THREE.Mesh(geometry); - - renderer = new THREE.WebGLRenderer({antialias: 
true}); - renderer.setSize(w, h); - renderer.setClearColor( 0x212121, 1 ); - - renderer.domElement.style.position = 'relative'; - - container.appendChild(renderer.domElement); - - container.addEventListener('mousedown', onMouseDown, false); - - if ('onwheel' in document) { - container.addEventListener('wheel', onMouseWheel, false); - } else { - container.addEventListener('mousewheel', onMouseWheel, false); - } - - document.addEventListener('keydown', onDocumentKeyDown, false); - - window.addEventListener('resize', onWindowResize, false); - - container.addEventListener('mouseover', function() { - overRenderer = true; - }, false); - - container.addEventListener('mouseout', function() { - overRenderer = false; - }, false); - } - - function addData(data, opts) { - var lat, lng, size, color, i, step, colorFnWrapper; - - opts.animated = opts.animated || false; - this.is_animated = opts.animated; - opts.format = opts.format || 'magnitude'; // other option is 'legend' - if (opts.format === 'magnitude') { - step = 3; - colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } - } else if (opts.format === 'legend') { - step = 4; - colorFnWrapper = function(data, i) { return colorFn(data[i+3]); } - } else if (opts.format === 'peer') { - colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } - } else { - throw('error: format not supported: '+opts.format); - } - - if (opts.animated) { - if (this._baseGeometry === undefined) { - this._baseGeometry = new THREE.Geometry(); - for (i = 0; i < data.length; i += step) { - lat = data[i]; - lng = data[i + 1]; -// size = data[i + 2]; - color = colorFnWrapper(data,i); - size = 0; - addPoint(lat, lng, size, color, this._baseGeometry); - } - } - if(this._morphTargetId === undefined) { - this._morphTargetId = 0; - } else { - this._morphTargetId += 1; - } - opts.name = opts.name || 'morphTarget'+this._morphTargetId; - } - var subgeo = new THREE.Geometry(); - for (i = 0; i < data.length; i += step) { - lat = data[i]; - lng = data[i + 1]; - color = colorFnWrapper(data,i); - size = data[i + 2]; - size = size*200; - addPoint(lat, lng, size, color, subgeo); - } - if (opts.animated) { - this._baseGeometry.morphTargets.push({'name': opts.name, vertices: subgeo.vertices}); - } else { - this._baseGeometry = subgeo; - } - - }; - - function createPoints() { - if (this._baseGeometry !== undefined) { - if (this.is_animated === false) { - this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ - color: 0xffffff, - vertexColors: THREE.FaceColors, - morphTargets: false - })); - } else { - if (this._baseGeometry.morphTargets.length < 8) { - console.log('t l',this._baseGeometry.morphTargets.length); - var padding = 8-this._baseGeometry.morphTargets.length; - console.log('padding', padding); - for(var i=0; i<=padding; i++) { - console.log('padding',i); - this._baseGeometry.morphTargets.push({'name': 'morphPadding'+i, vertices: this._baseGeometry.vertices}); - } - } - this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ - color: 0xffffff, - vertexColors: THREE.FaceColors, - morphTargets: true - })); - } - scene.add(this.points); - } - } - - function addPoint(lat, lng, size, color, subgeo) { - - var phi = (90 - lat) * Math.PI / 180; - var theta = (180 - lng) * Math.PI / 180; - - point.position.x = 200 * Math.sin(phi) * Math.cos(theta); - point.position.y = 200 * Math.cos(phi); - point.position.z = 200 * Math.sin(phi) * Math.sin(theta); - - point.lookAt(mesh.position); - - point.scale.z = Math.max( size, 0.1 ); // avoid 
non-invertible matrix - point.updateMatrix(); - - for (var i = 0; i < point.geometry.faces.length; i++) { - - point.geometry.faces[i].color = color; - - } - if(point.matrixAutoUpdate){ - point.updateMatrix(); - } - subgeo.merge(point.geometry, point.matrix); - } - - function onMouseDown(event) { - event.preventDefault(); - - container.addEventListener('mousemove', onMouseMove, false); - container.addEventListener('mouseup', onMouseUp, false); - container.addEventListener('mouseout', onMouseOut, false); - - mouseOnDown.x = - event.clientX; - mouseOnDown.y = event.clientY; - - targetOnDown.x = target.x; - targetOnDown.y = target.y; - - container.style.cursor = 'move'; - } - - function onMouseMove(event) { - mouse.x = - event.clientX; - mouse.y = event.clientY; - - var zoomDamp = distance/1000; - - target.x = targetOnDown.x + (mouse.x - mouseOnDown.x) * 0.005 * zoomDamp; - target.y = targetOnDown.y + (mouse.y - mouseOnDown.y) * 0.005 * zoomDamp; - - target.y = target.y > PI_HALF ? PI_HALF : target.y; - target.y = target.y < - PI_HALF ? - PI_HALF : target.y; - } - - function onMouseUp(event) { - container.removeEventListener('mousemove', onMouseMove, false); - container.removeEventListener('mouseup', onMouseUp, false); - container.removeEventListener('mouseout', onMouseOut, false); - container.style.cursor = 'auto'; - } - - function onMouseOut(event) { - container.removeEventListener('mousemove', onMouseMove, false); - container.removeEventListener('mouseup', onMouseUp, false); - container.removeEventListener('mouseout', onMouseOut, false); - } - - function onMouseWheel(event) { - if (container.style.cursor != "move") return false; - event.preventDefault(); - if (overRenderer) { - if (event.deltaY) { - zoom(-event.deltaY * (event.deltaMode == 0 ? 1 : 50)); - } else { - zoom(event.wheelDeltaY * 0.3); - } - } - return false; - } - - function onDocumentKeyDown(event) { - switch (event.keyCode) { - case 38: - zoom(100); - event.preventDefault(); - break; - case 40: - zoom(-100); - event.preventDefault(); - break; - } - } - - function onWindowResize( event ) { - camera.aspect = container.offsetWidth / container.offsetHeight; - camera.updateProjectionMatrix(); - renderer.setSize( container.offsetWidth, container.offsetHeight ); - } - - function zoom(delta) { - distanceTarget -= delta; - distanceTarget = distanceTarget > 855 ? 855 : distanceTarget; - distanceTarget = distanceTarget < 350 ? 
350 : distanceTarget; - } - - function animate() { - if (!running) return - requestAnimationFrame(animate); - render(); - } - - function render() { - zoom(curZoomSpeed); - - rotation.x += (target.x - rotation.x) * 0.1; - rotation.y += (target.y - rotation.y) * 0.1; - distance += (distanceTarget - distance) * 0.3; - - camera.position.x = distance * Math.sin(rotation.x) * Math.cos(rotation.y); - camera.position.y = distance * Math.sin(rotation.y); - camera.position.z = distance * Math.cos(rotation.x) * Math.cos(rotation.y); - - camera.lookAt(mesh.position); - - renderer.render(scene, camera); - } - - function unload() { - running = false - container.removeEventListener('mousedown', onMouseDown, false); - if ('onwheel' in document) { - container.removeEventListener('wheel', onMouseWheel, false); - } else { - container.removeEventListener('mousewheel', onMouseWheel, false); - } - document.removeEventListener('keydown', onDocumentKeyDown, false); - window.removeEventListener('resize', onWindowResize, false); - - } - - init(); - this.animate = animate; - this.unload = unload; - - - this.__defineGetter__('time', function() { - return this._time || 0; - }); - - this.__defineSetter__('time', function(t) { - var validMorphs = []; - var morphDict = this.points.morphTargetDictionary; - for(var k in morphDict) { - if(k.indexOf('morphPadding') < 0) { - validMorphs.push(morphDict[k]); - } - } - validMorphs.sort(); - var l = validMorphs.length-1; - var scaledt = t*l+1; - var index = Math.floor(scaledt); - for (i=0;i= 0) { - this.points.morphTargetInfluences[lastIndex] = 1 - leftover; - } - this.points.morphTargetInfluences[index] = leftover; - this._time = t; - }); - - this.addData = addData; - this.createPoints = createPoints; - this.renderer = renderer; - this.scene = scene; - - return this; - -}; diff --git a/plugins/Sidebar/media-globe/three.min.js b/plugins/Sidebar/media-globe/three.min.js deleted file mode 100644 index a88b4afa..00000000 --- a/plugins/Sidebar/media-globe/three.min.js +++ /dev/null @@ -1,814 +0,0 @@ -// threejs.org/license -'use strict';var THREE={REVISION:"69"};"object"===typeof module&&(module.exports=THREE);void 0===Math.sign&&(Math.sign=function(a){return 0>a?-1:0>16&255)/255;this.g=(a>>8&255)/255;this.b=(a&255)/255;return this},setRGB:function(a,b,c){this.r=a;this.g=b;this.b=c;return this},setHSL:function(a,b,c){if(0===b)this.r=this.g=this.b=c;else{var d=function(a,b,c){0>c&&(c+=1);1c?b:c<2/3?a+6*(b-a)*(2/3-c):a};b=.5>=c?c*(1+b):c+b-c*b;c=2*c-b;this.r=d(c,b,a+1/3);this.g=d(c,b,a);this.b=d(c,b,a-1/3)}return this},setStyle:function(a){if(/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.test(a))return a=/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.exec(a),this.r=Math.min(255,parseInt(a[1],10))/255,this.g=Math.min(255,parseInt(a[2],10))/255,this.b=Math.min(255,parseInt(a[3],10))/255,this;if(/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.test(a))return a=/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.exec(a),this.r= -Math.min(100,parseInt(a[1],10))/100,this.g=Math.min(100,parseInt(a[2],10))/100,this.b=Math.min(100,parseInt(a[3],10))/100,this;if(/^\#([0-9a-f]{6})$/i.test(a))return a=/^\#([0-9a-f]{6})$/i.exec(a),this.setHex(parseInt(a[1],16)),this;if(/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.test(a))return a=/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(a),this.setHex(parseInt(a[1]+a[1]+a[2]+a[2]+a[3]+a[3],16)),this;if(/^(\w+)$/i.test(a))return this.setHex(THREE.ColorKeywords[a]),this},copy:function(a){this.r=a.r;this.g= -a.g;this.b=a.b;return this},copyGammaToLinear:function(a){this.r=a.r*a.r;this.g=a.g*a.g;this.b=a.b*a.b;return 
this},copyLinearToGamma:function(a){this.r=Math.sqrt(a.r);this.g=Math.sqrt(a.g);this.b=Math.sqrt(a.b);return this},convertGammaToLinear:function(){var a=this.r,b=this.g,c=this.b;this.r=a*a;this.g=b*b;this.b=c*c;return this},convertLinearToGamma:function(){this.r=Math.sqrt(this.r);this.g=Math.sqrt(this.g);this.b=Math.sqrt(this.b);return this},getHex:function(){return 255*this.r<<16^255*this.g<< -8^255*this.b<<0},getHexString:function(){return("000000"+this.getHex().toString(16)).slice(-6)},getHSL:function(a){a=a||{h:0,s:0,l:0};var b=this.r,c=this.g,d=this.b,e=Math.max(b,c,d),f=Math.min(b,c,d),g,h=(f+e)/2;if(f===e)f=g=0;else{var k=e-f,f=.5>=h?k/(e+f):k/(2-e-f);switch(e){case b:g=(c-d)/k+(cf&&c>b?(c=2*Math.sqrt(1+c-f-b),this._w=(k-g)/c,this._x=.25*c,this._y=(a+e)/c,this._z=(d+h)/c):f>b?(c=2*Math.sqrt(1+f-c-b),this._w=(d-h)/c,this._x=(a+e)/c,this._y= -.25*c,this._z=(g+k)/c):(c=2*Math.sqrt(1+b-c-f),this._w=(e-a)/c,this._x=(d+h)/c,this._y=(g+k)/c,this._z=.25*c);this.onChangeCallback();return this},setFromUnitVectors:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3);b=c.dot(d)+1;1E-6>b?(b=0,Math.abs(c.x)>Math.abs(c.z)?a.set(-c.y,c.x,0):a.set(0,-c.z,c.y)):a.crossVectors(c,d);this._x=a.x;this._y=a.y;this._z=a.z;this._w=b;this.normalize();return this}}(),inverse:function(){this.conjugate().normalize();return this},conjugate:function(){this._x*= --1;this._y*=-1;this._z*=-1;this.onChangeCallback();return this},dot:function(a){return this._x*a._x+this._y*a._y+this._z*a._z+this._w*a._w},lengthSq:function(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w},length:function(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)},normalize:function(){var a=this.length();0===a?(this._z=this._y=this._x=0,this._w=1):(a=1/a,this._x*=a,this._y*=a,this._z*=a,this._w*=a);this.onChangeCallback();return this}, -multiply:function(a,b){return void 0!==b?(console.warn("THREE.Quaternion: .multiply() now only accepts one argument. Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(a,b)):this.multiplyQuaternions(this,a)},multiplyQuaternions:function(a,b){var c=a._x,d=a._y,e=a._z,f=a._w,g=b._x,h=b._y,k=b._z,n=b._w;this._x=c*n+f*g+d*k-e*h;this._y=d*n+f*h+e*g-c*k;this._z=e*n+f*k+c*h-d*g;this._w=f*n-c*g-d*h-e*k;this.onChangeCallback();return this},multiplyVector3:function(a){console.warn("THREE.Quaternion: .multiplyVector3() has been removed. 
Use is now vector.applyQuaternion( quaternion ) instead."); -return a.applyQuaternion(this)},slerp:function(a,b){if(0===b)return this;if(1===b)return this.copy(a);var c=this._x,d=this._y,e=this._z,f=this._w,g=f*a._w+c*a._x+d*a._y+e*a._z;0>g?(this._w=-a._w,this._x=-a._x,this._y=-a._y,this._z=-a._z,g=-g):this.copy(a);if(1<=g)return this._w=f,this._x=c,this._y=d,this._z=e,this;var h=Math.acos(g),k=Math.sqrt(1-g*g);if(.001>Math.abs(k))return this._w=.5*(f+this._w),this._x=.5*(c+this._x),this._y=.5*(d+this._y),this._z=.5*(e+this._z),this;g=Math.sin((1-b)*h)/k;h= -Math.sin(b*h)/k;this._w=f*g+this._w*h;this._x=c*g+this._x*h;this._y=d*g+this._y*h;this._z=e*g+this._z*h;this.onChangeCallback();return this},equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._w===this._w},fromArray:function(a,b){void 0===b&&(b=0);this._x=a[b];this._y=a[b+1];this._z=a[b+2];this._w=a[b+3];this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._w;return a},onChange:function(a){this.onChangeCallback= -a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Quaternion(this._x,this._y,this._z,this._w)}};THREE.Quaternion.slerp=function(a,b,c,d){return c.copy(a).slerp(b,d)};THREE.Vector2=function(a,b){this.x=a||0;this.y=b||0}; -THREE.Vector2.prototype={constructor:THREE.Vector2,set:function(a,b){this.x=a;this.y=b;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;return this},add:function(a, -b){if(void 0!==b)return console.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;return this},addScalar:function(a){this.x+=a;this.y+=a;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector2: .sub() now only accepts one argument. 
Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;return this}, -subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;return this},multiply:function(a){this.x*=a.x;this.y*=a.y;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;return this},divide:function(a){this.x/=a.x;this.y/=a.y;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a):this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector2,b=new THREE.Vector2);a.set(c,c);b.set(d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);return this}, -roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);return this},negate:function(){this.x=-this.x;this.y=-this.y;return this},dot:function(a){return this.x*a.x+this.y*a.y},lengthSq:function(){return this.x*this.x+this.y*this.y},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y)},normalize:function(){return this.divideScalar(this.length())},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b= -this.x-a.x;a=this.y-a.y;return b*b+a*a},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;return a},clone:function(){return new THREE.Vector2(this.x,this.y)}}; -THREE.Vector3=function(a,b,c){this.x=a||0;this.y=b||0;this.z=c||0}; -THREE.Vector3.prototype={constructor:THREE.Vector3,set:function(a,b,c){this.x=a;this.y=b;this.z=c;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:throw Error("index is out of range: "+ -a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."), -this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;return this},multiply:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .multiply() now only accepts one argument. 
Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(a,b);this.x*=a.x;this.y*=a.y;this.z*=a.z;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;return this},multiplyVectors:function(a,b){this.x=a.x*b.x;this.y= -a.y*b.y;this.z=a.z*b.z;return this},applyEuler:function(){var a;return function(b){!1===b instanceof THREE.Euler&&console.error("THREE.Vector3: .applyEuler() now expects a Euler rotation rather than a Vector3 and order.");void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromEuler(b));return this}}(),applyAxisAngle:function(){var a;return function(b,c){void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromAxisAngle(b,c));return this}}(),applyMatrix3:function(a){var b=this.x, -c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[3]*c+a[6]*d;this.y=a[1]*b+a[4]*c+a[7]*d;this.z=a[2]*b+a[5]*c+a[8]*d;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12];this.y=a[1]*b+a[5]*c+a[9]*d+a[13];this.z=a[2]*b+a[6]*c+a[10]*d+a[14];return this},applyProjection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;var e=1/(a[3]*b+a[7]*c+a[11]*d+a[15]);this.x=(a[0]*b+a[4]*c+a[8]*d+a[12])*e;this.y=(a[1]*b+a[5]*c+a[9]*d+a[13])*e;this.z= -(a[2]*b+a[6]*c+a[10]*d+a[14])*e;return this},applyQuaternion:function(a){var b=this.x,c=this.y,d=this.z,e=a.x,f=a.y,g=a.z;a=a.w;var h=a*b+f*d-g*c,k=a*c+g*b-e*d,n=a*d+e*c-f*b,b=-e*b-f*c-g*d;this.x=h*a+b*-e+k*-g-n*-f;this.y=k*a+b*-f+n*-e-h*-g;this.z=n*a+b*-g+h*-f-k*-e;return this},project:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.projectionMatrix,a.getInverse(b.matrixWorld));return this.applyProjection(a)}}(),unproject:function(){var a;return function(b){void 0=== -a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.matrixWorld,a.getInverse(b.projectionMatrix));return this.applyProjection(a)}}(),transformDirection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d;this.y=a[1]*b+a[5]*c+a[9]*d;this.z=a[2]*b+a[6]*c+a[10]*d;this.normalize();return this},divide:function(a){this.x/=a.x;this.y/=a.y;this.z/=a.z;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a):this.z=this.y=this.x=0;return this},min:function(a){this.x> -a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3,b=new THREE.Vector3);a.set(c,c,c);b.set(d,d,d);return this.clamp(a, -b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z); -return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)},normalize:function(){return 
this.divideScalar(this.length())},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/ -b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;return this},cross:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead."),this.crossVectors(a,b);var c=this.x,d=this.y,e=this.z;this.x=d*a.z-e*a.y;this.y=e*a.x-c*a.z;this.z=c*a.y-d*a.x;return this},crossVectors:function(a,b){var c=a.x,d=a.y,e=a.z,f=b.x,g=b.y,h=b.z;this.x=d*h-e*g;this.y=e*f-c*h;this.z=c*g-d*f;return this}, -projectOnVector:function(){var a,b;return function(c){void 0===a&&(a=new THREE.Vector3);a.copy(c).normalize();b=this.dot(a);return this.copy(a).multiplyScalar(b)}}(),projectOnPlane:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);a.copy(this).projectOnVector(b);return this.sub(a)}}(),reflect:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);return this.sub(a.copy(b).multiplyScalar(2*this.dot(b)))}}(),angleTo:function(a){a=this.dot(a)/(this.length()*a.length()); -return Math.acos(THREE.Math.clamp(a,-1,1))},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b=this.x-a.x,c=this.y-a.y;a=this.z-a.z;return b*b+c*c+a*a},setEulerFromRotationMatrix:function(a,b){console.error("THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotationMatrix() instead.")},setEulerFromQuaternion:function(a,b){console.error("THREE.Vector3: .setEulerFromQuaternion() has been removed. Use Euler.setFromQuaternion() instead.")}, -getPositionFromMatrix:function(a){console.warn("THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().");return this.setFromMatrixPosition(a)},getScaleFromMatrix:function(a){console.warn("THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().");return this.setFromMatrixScale(a)},getColumnFromMatrix:function(a,b){console.warn("THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().");return this.setFromMatrixColumn(a, -b)},setFromMatrixPosition:function(a){this.x=a.elements[12];this.y=a.elements[13];this.z=a.elements[14];return this},setFromMatrixScale:function(a){var b=this.set(a.elements[0],a.elements[1],a.elements[2]).length(),c=this.set(a.elements[4],a.elements[5],a.elements[6]).length();a=this.set(a.elements[8],a.elements[9],a.elements[10]).length();this.x=b;this.y=c;this.z=a;return this},setFromMatrixColumn:function(a,b){var c=4*a,d=b.elements;this.x=d[c];this.y=d[c+1];this.z=d[c+2];return this},equals:function(a){return a.x=== -this.x&&a.y===this.y&&a.z===this.z},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;return a},clone:function(){return new THREE.Vector3(this.x,this.y,this.z)}};THREE.Vector4=function(a,b,c,d){this.x=a||0;this.y=b||0;this.z=c||0;this.w=void 0!==d?d:1}; -THREE.Vector4.prototype={constructor:THREE.Vector4,set:function(a,b,c,d){this.x=a;this.y=b;this.z=c;this.w=d;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setW:function(a){this.w=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;case 3:this.w=b;break;default:throw Error("index is out of range: 
"+a);}},getComponent:function(a){switch(a){case 0:return this.x; -case 1:return this.y;case 2:return this.z;case 3:return this.w;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;this.w=void 0!==a.w?a.w:1;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;this.w+=a.w;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;this.w+=a;return this}, -addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;this.w=a.w+b.w;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;this.w-=a.w;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;this.w=a.w-b.w;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;this.w*=a;return this},applyMatrix4:function(a){var b= -this.x,c=this.y,d=this.z,e=this.w;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12]*e;this.y=a[1]*b+a[5]*c+a[9]*d+a[13]*e;this.z=a[2]*b+a[6]*c+a[10]*d+a[14]*e;this.w=a[3]*b+a[7]*c+a[11]*d+a[15]*e;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a,this.w*=a):(this.z=this.y=this.x=0,this.w=1);return this},setAxisAngleFromQuaternion:function(a){this.w=2*Math.acos(a.w);var b=Math.sqrt(1-a.w*a.w);1E-4>b?(this.x=1,this.z=this.y=0):(this.x=a.x/b,this.y=a.y/b,this.z=a.z/b);return this}, -setAxisAngleFromRotationMatrix:function(a){var b,c,d;a=a.elements;var e=a[0];d=a[4];var f=a[8],g=a[1],h=a[5],k=a[9];c=a[2];b=a[6];var n=a[10];if(.01>Math.abs(d-g)&&.01>Math.abs(f-c)&&.01>Math.abs(k-b)){if(.1>Math.abs(d+g)&&.1>Math.abs(f+c)&&.1>Math.abs(k+b)&&.1>Math.abs(e+h+n-3))return this.set(1,0,0,0),this;a=Math.PI;e=(e+1)/2;h=(h+1)/2;n=(n+1)/2;d=(d+g)/4;f=(f+c)/4;k=(k+b)/4;e>h&&e>n?.01>e?(b=0,d=c=.707106781):(b=Math.sqrt(e),c=d/b,d=f/b):h>n?.01>h?(b=.707106781,c=0,d=.707106781):(c=Math.sqrt(h), -b=d/c,d=k/c):.01>n?(c=b=.707106781,d=0):(d=Math.sqrt(n),b=f/d,c=k/d);this.set(b,c,d,a);return this}a=Math.sqrt((b-k)*(b-k)+(f-c)*(f-c)+(g-d)*(g-d));.001>Math.abs(a)&&(a=1);this.x=(b-k)/a;this.y=(f-c)/a;this.z=(g-d)/a;this.w=Math.acos((e+h+n-1)/2);return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);this.w>a.w&&(this.w=a.w);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);this.wb.w&&(this.w=b.w);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector4,b=new THREE.Vector4);a.set(c,c,c,c);b.set(d,d,d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);this.w=Math.floor(this.w); -return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);this.w=Math.ceil(this.w);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);this.w=Math.round(this.w);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);this.w=0>this.w?Math.ceil(this.w):Math.floor(this.w); -return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;this.w=-this.w;return 
this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z+this.w*a.w},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)+Math.abs(this.w)},normalize:function(){return this.divideScalar(this.length())}, -setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;this.w+=(a.w-this.w)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z&&a.w===this.w},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];this.w=a[b+3];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]= -this.z;a[b+3]=this.w;return a},clone:function(){return new THREE.Vector4(this.x,this.y,this.z,this.w)}};THREE.Euler=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._order=d||THREE.Euler.DefaultOrder};THREE.Euler.RotationOrders="XYZ YZX ZXY XZY YXZ ZYX".split(" ");THREE.Euler.DefaultOrder="XYZ"; -THREE.Euler.prototype={constructor:THREE.Euler,_x:0,_y:0,_z:0,_order:THREE.Euler.DefaultOrder,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get order(){return this._order},set order(a){this._order=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._order=d||this._order;this.onChangeCallback();return this},copy:function(a){this._x= -a._x;this._y=a._y;this._z=a._z;this._order=a._order;this.onChangeCallback();return this},setFromRotationMatrix:function(a,b){var c=THREE.Math.clamp,d=a.elements,e=d[0],f=d[4],g=d[8],h=d[1],k=d[5],n=d[9],p=d[2],q=d[6],d=d[10];b=b||this._order;"XYZ"===b?(this._y=Math.asin(c(g,-1,1)),.99999>Math.abs(g)?(this._x=Math.atan2(-n,d),this._z=Math.atan2(-f,e)):(this._x=Math.atan2(q,k),this._z=0)):"YXZ"===b?(this._x=Math.asin(-c(n,-1,1)),.99999>Math.abs(n)?(this._y=Math.atan2(g,d),this._z=Math.atan2(h,k)):(this._y= -Math.atan2(-p,e),this._z=0)):"ZXY"===b?(this._x=Math.asin(c(q,-1,1)),.99999>Math.abs(q)?(this._y=Math.atan2(-p,d),this._z=Math.atan2(-f,k)):(this._y=0,this._z=Math.atan2(h,e))):"ZYX"===b?(this._y=Math.asin(-c(p,-1,1)),.99999>Math.abs(p)?(this._x=Math.atan2(q,d),this._z=Math.atan2(h,e)):(this._x=0,this._z=Math.atan2(-f,k))):"YZX"===b?(this._z=Math.asin(c(h,-1,1)),.99999>Math.abs(h)?(this._x=Math.atan2(-n,k),this._y=Math.atan2(-p,e)):(this._x=0,this._y=Math.atan2(g,d))):"XZY"===b?(this._z=Math.asin(-c(f, --1,1)),.99999>Math.abs(f)?(this._x=Math.atan2(q,k),this._y=Math.atan2(g,e)):(this._x=Math.atan2(-n,d),this._y=0)):console.warn("THREE.Euler: .setFromRotationMatrix() given unsupported order: "+b);this._order=b;this.onChangeCallback();return this},setFromQuaternion:function(a,b,c){var d=THREE.Math.clamp,e=a.x*a.x,f=a.y*a.y,g=a.z*a.z,h=a.w*a.w;b=b||this._order;"XYZ"===b?(this._x=Math.atan2(2*(a.x*a.w-a.y*a.z),h-e-f+g),this._y=Math.asin(d(2*(a.x*a.z+a.y*a.w),-1,1)),this._z=Math.atan2(2*(a.z*a.w-a.x* 
-a.y),h+e-f-g)):"YXZ"===b?(this._x=Math.asin(d(2*(a.x*a.w-a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h-e-f+g),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h-e+f-g)):"ZXY"===b?(this._x=Math.asin(d(2*(a.x*a.w+a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.y*a.w-a.z*a.x),h-e-f+g),this._z=Math.atan2(2*(a.z*a.w-a.x*a.y),h-e+f-g)):"ZYX"===b?(this._x=Math.atan2(2*(a.x*a.w+a.z*a.y),h-e-f+g),this._y=Math.asin(d(2*(a.y*a.w-a.x*a.z),-1,1)),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h+e-f-g)):"YZX"===b?(this._x=Math.atan2(2* -(a.x*a.w-a.z*a.y),h-e+f-g),this._y=Math.atan2(2*(a.y*a.w-a.x*a.z),h+e-f-g),this._z=Math.asin(d(2*(a.x*a.y+a.z*a.w),-1,1))):"XZY"===b?(this._x=Math.atan2(2*(a.x*a.w+a.y*a.z),h-e+f-g),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h+e-f-g),this._z=Math.asin(d(2*(a.z*a.w-a.x*a.y),-1,1))):console.warn("THREE.Euler: .setFromQuaternion() given unsupported order: "+b);this._order=b;if(!1!==c)this.onChangeCallback();return this},reorder:function(){var a=new THREE.Quaternion;return function(b){a.setFromEuler(this); -this.setFromQuaternion(a,b)}}(),equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._order===this._order},fromArray:function(a){this._x=a[0];this._y=a[1];this._z=a[2];void 0!==a[3]&&(this._order=a[3]);this.onChangeCallback();return this},toArray:function(){return[this._x,this._y,this._z,this._order]},onChange:function(a){this.onChangeCallback=a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Euler(this._x,this._y,this._z,this._order)}}; -THREE.Line3=function(a,b){this.start=void 0!==a?a:new THREE.Vector3;this.end=void 0!==b?b:new THREE.Vector3}; -THREE.Line3.prototype={constructor:THREE.Line3,set:function(a,b){this.start.copy(a);this.end.copy(b);return this},copy:function(a){this.start.copy(a.start);this.end.copy(a.end);return this},center:function(a){return(a||new THREE.Vector3).addVectors(this.start,this.end).multiplyScalar(.5)},delta:function(a){return(a||new THREE.Vector3).subVectors(this.end,this.start)},distanceSq:function(){return this.start.distanceToSquared(this.end)},distance:function(){return this.start.distanceTo(this.end)},at:function(a, -b){var c=b||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},closestPointToPointParameter:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d){a.subVectors(c,this.start);b.subVectors(this.end,this.start);var e=b.dot(b),e=b.dot(a)/e;d&&(e=THREE.Math.clamp(e,0,1));return e}}(),closestPointToPoint:function(a,b,c){a=this.closestPointToPointParameter(a,b);c=c||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},applyMatrix4:function(a){this.start.applyMatrix4(a); -this.end.applyMatrix4(a);return this},equals:function(a){return a.start.equals(this.start)&&a.end.equals(this.end)},clone:function(){return(new THREE.Line3).copy(this)}};THREE.Box2=function(a,b){this.min=void 0!==a?a:new THREE.Vector2(Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector2(-Infinity,-Infinity)}; -THREE.Box2.prototype={constructor:THREE.Box2,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector2).set((a.x-this.min.x)/(this.max.x-this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.y 
-this.max.y?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector2).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector2;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},translate:function(a){this.min.add(a);this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&& -a.max.equals(this.max)},clone:function(){return(new THREE.Box2).copy(this)}};THREE.Box3=function(a,b){this.min=void 0!==a?a:new THREE.Vector3(Infinity,Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector3(-Infinity,-Infinity,-Infinity)}; -THREE.Box3.prototype={constructor:THREE.Box3,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y||a.zthis.max.z?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y&&this.min.z<=a.min.z&&a.max.z<=this.max.z?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector3).set((a.x-this.min.x)/(this.max.x- -this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y),(a.z-this.min.z)/(this.max.z-this.min.z))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.ythis.max.y||a.max.zthis.max.z?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector3).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),getBoundingSphere:function(){var a= -new THREE.Vector3;return function(b){b=b||new THREE.Sphere;b.center=this.center();b.radius=.5*this.size(a).length();return b}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},applyMatrix4:function(){var a=[new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3];return function(b){a[0].set(this.min.x,this.min.y, -this.min.z).applyMatrix4(b);a[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(b);a[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(b);a[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(b);a[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(b);a[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(b);a[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(b);a[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(b);this.makeEmpty();this.setFromPoints(a);return this}}(),translate:function(a){this.min.add(a); -this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&&a.max.equals(this.max)},clone:function(){return(new THREE.Box3).copy(this)}};THREE.Matrix3=function(){this.elements=new Float32Array([1,0,0,0,1,0,0,0,1]);0this.determinant()&&(g=-g);c.x=f[12];c.y=f[13];c.z=f[14];b.elements.set(this.elements);c=1/g;var f=1/h,n=1/k;b.elements[0]*=c;b.elements[1]*= -c;b.elements[2]*=c;b.elements[4]*=f;b.elements[5]*=f;b.elements[6]*=f;b.elements[8]*=n;b.elements[9]*=n;b.elements[10]*=n;d.setFromRotationMatrix(b);e.x=g;e.y=h;e.z=k;return this}}(),makeFrustum:function(a,b,c,d,e,f){var g=this.elements;g[0]=2*e/(b-a);g[4]=0;g[8]=(b+a)/(b-a);g[12]=0;g[1]=0;g[5]=2*e/(d-c);g[9]=(d+c)/(d-c);g[13]=0;g[2]=0;g[6]=0;g[10]=-(f+e)/(f-e);g[14]=-2*f*e/(f-e);g[3]=0;g[7]=0;g[11]=-1;g[15]=0;return 
this},makePerspective:function(a,b,c,d){a=c*Math.tan(THREE.Math.degToRad(.5*a)); -var e=-a;return this.makeFrustum(e*b,a*b,e,a,c,d)},makeOrthographic:function(a,b,c,d,e,f){var g=this.elements,h=b-a,k=c-d,n=f-e;g[0]=2/h;g[4]=0;g[8]=0;g[12]=-((b+a)/h);g[1]=0;g[5]=2/k;g[9]=0;g[13]=-((c+d)/k);g[2]=0;g[6]=0;g[10]=-2/n;g[14]=-((f+e)/n);g[3]=0;g[7]=0;g[11]=0;g[15]=1;return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements;return[a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15]]},clone:function(){return(new THREE.Matrix4).fromArray(this.elements)}}; -THREE.Ray=function(a,b){this.origin=void 0!==a?a:new THREE.Vector3;this.direction=void 0!==b?b:new THREE.Vector3}; -THREE.Ray.prototype={constructor:THREE.Ray,set:function(a,b){this.origin.copy(a);this.direction.copy(b);return this},copy:function(a){this.origin.copy(a.origin);this.direction.copy(a.direction);return this},at:function(a,b){return(b||new THREE.Vector3).copy(this.direction).multiplyScalar(a).add(this.origin)},recast:function(){var a=new THREE.Vector3;return function(b){this.origin.copy(this.at(b,a));return this}}(),closestPointToPoint:function(a,b){var c=b||new THREE.Vector3;c.subVectors(a,this.origin); -var d=c.dot(this.direction);return 0>d?c.copy(this.origin):c.copy(this.direction).multiplyScalar(d).add(this.origin)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){var c=a.subVectors(b,this.origin).dot(this.direction);if(0>c)return this.origin.distanceTo(b);a.copy(this.direction).multiplyScalar(c).add(this.origin);return a.distanceTo(b)}}(),distanceSqToSegment:function(a,b,c,d){var e=a.clone().add(b).multiplyScalar(.5),f=b.clone().sub(a).normalize(),g=.5*a.distanceTo(b),h= -this.origin.clone().sub(e);a=-this.direction.dot(f);b=h.dot(this.direction);var k=-h.dot(f),n=h.lengthSq(),p=Math.abs(1-a*a),q,m;0<=p?(h=a*k-b,q=a*b-k,m=g*p,0<=h?q>=-m?q<=m?(g=1/p,h*=g,q*=g,a=h*(h+a*q+2*b)+q*(a*h+q+2*k)+n):(q=g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):(q=-g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):q<=-m?(h=Math.max(0,-(-a*g+b)),q=0f)return null;f=Math.sqrt(f-e);e=d-f; -d+=f;return 0>e&&0>d?null:0>e?this.at(d,c):this.at(e,c)}}(),isIntersectionPlane:function(a){var b=a.distanceToPoint(this.origin);return 0===b||0>a.normal.dot(this.direction)*b?!0:!1},distanceToPlane:function(a){var b=a.normal.dot(this.direction);if(0==b)return 0==a.distanceToPoint(this.origin)?0:null;a=-(this.origin.dot(a.normal)+a.constant)/b;return 0<=a?a:null},intersectPlane:function(a,b){var c=this.distanceToPlane(a);return null===c?null:this.at(c,b)},isIntersectionBox:function(){var a=new THREE.Vector3; -return function(b){return null!==this.intersectBox(b,a)}}(),intersectBox:function(a,b){var c,d,e,f,g;d=1/this.direction.x;f=1/this.direction.y;g=1/this.direction.z;var h=this.origin;0<=d?(c=(a.min.x-h.x)*d,d*=a.max.x-h.x):(c=(a.max.x-h.x)*d,d*=a.min.x-h.x);0<=f?(e=(a.min.y-h.y)*f,f*=a.max.y-h.y):(e=(a.max.y-h.y)*f,f*=a.min.y-h.y);if(c>f||e>d)return null;if(e>c||c!==c)c=e;if(fg||e>d)return null;if(e>c||c!== -c)c=e;if(gd?null:this.at(0<=c?c:d,b)},intersectTriangle:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Vector3;return function(e,f,g,h,k){b.subVectors(f,e);c.subVectors(g,e);d.crossVectors(b,c);f=this.direction.dot(d);if(0f)h=-1,f=-f;else return null;a.subVectors(this.origin,e);e=h*this.direction.dot(c.crossVectors(a,c));if(0>e)return null;g=h*this.direction.dot(b.cross(a));if(0>g||e+g>f)return null; -e=-h*a.dot(d);return 
0>e?null:this.at(e/f,k)}}(),applyMatrix4:function(a){this.direction.add(this.origin).applyMatrix4(a);this.origin.applyMatrix4(a);this.direction.sub(this.origin);this.direction.normalize();return this},equals:function(a){return a.origin.equals(this.origin)&&a.direction.equals(this.direction)},clone:function(){return(new THREE.Ray).copy(this)}};THREE.Sphere=function(a,b){this.center=void 0!==a?a:new THREE.Vector3;this.radius=void 0!==b?b:0}; -THREE.Sphere.prototype={constructor:THREE.Sphere,set:function(a,b){this.center.copy(a);this.radius=b;return this},setFromPoints:function(){var a=new THREE.Box3;return function(b,c){var d=this.center;void 0!==c?d.copy(c):a.setFromPoints(b).center(d);for(var e=0,f=0,g=b.length;f=this.radius},containsPoint:function(a){return a.distanceToSquared(this.center)<= -this.radius*this.radius},distanceToPoint:function(a){return a.distanceTo(this.center)-this.radius},intersectsSphere:function(a){var b=this.radius+a.radius;return a.center.distanceToSquared(this.center)<=b*b},clampPoint:function(a,b){var c=this.center.distanceToSquared(a),d=b||new THREE.Vector3;d.copy(a);c>this.radius*this.radius&&(d.sub(this.center).normalize(),d.multiplyScalar(this.radius).add(this.center));return d},getBoundingBox:function(a){a=a||new THREE.Box3;a.set(this.center,this.center);a.expandByScalar(this.radius); -return a},applyMatrix4:function(a){this.center.applyMatrix4(a);this.radius*=a.getMaxScaleOnAxis();return this},translate:function(a){this.center.add(a);return this},equals:function(a){return a.center.equals(this.center)&&a.radius===this.radius},clone:function(){return(new THREE.Sphere).copy(this)}}; -THREE.Frustum=function(a,b,c,d,e,f){this.planes=[void 0!==a?a:new THREE.Plane,void 0!==b?b:new THREE.Plane,void 0!==c?c:new THREE.Plane,void 0!==d?d:new THREE.Plane,void 0!==e?e:new THREE.Plane,void 0!==f?f:new THREE.Plane]}; -THREE.Frustum.prototype={constructor:THREE.Frustum,set:function(a,b,c,d,e,f){var g=this.planes;g[0].copy(a);g[1].copy(b);g[2].copy(c);g[3].copy(d);g[4].copy(e);g[5].copy(f);return this},copy:function(a){for(var b=this.planes,c=0;6>c;c++)b[c].copy(a.planes[c]);return this},setFromMatrix:function(a){var b=this.planes,c=a.elements;a=c[0];var d=c[1],e=c[2],f=c[3],g=c[4],h=c[5],k=c[6],n=c[7],p=c[8],q=c[9],m=c[10],r=c[11],t=c[12],s=c[13],u=c[14],c=c[15];b[0].setComponents(f-a,n-g,r-p,c-t).normalize();b[1].setComponents(f+ -a,n+g,r+p,c+t).normalize();b[2].setComponents(f+d,n+h,r+q,c+s).normalize();b[3].setComponents(f-d,n-h,r-q,c-s).normalize();b[4].setComponents(f-e,n-k,r-m,c-u).normalize();b[5].setComponents(f+e,n+k,r+m,c+u).normalize();return this},intersectsObject:function(){var a=new THREE.Sphere;return function(b){var c=b.geometry;null===c.boundingSphere&&c.computeBoundingSphere();a.copy(c.boundingSphere);a.applyMatrix4(b.matrixWorld);return this.intersectsSphere(a)}}(),intersectsSphere:function(a){var b=this.planes, -c=a.center;a=-a.radius;for(var d=0;6>d;d++)if(b[d].distanceToPoint(c)e;e++){var f=d[e];a.x=0g&&0>f)return!1}return!0}}(), -containsPoint:function(a){for(var b=this.planes,c=0;6>c;c++)if(0>b[c].distanceToPoint(a))return!1;return!0},clone:function(){return(new THREE.Frustum).copy(this)}};THREE.Plane=function(a,b){this.normal=void 0!==a?a:new THREE.Vector3(1,0,0);this.constant=void 0!==b?b:0}; -THREE.Plane.prototype={constructor:THREE.Plane,set:function(a,b){this.normal.copy(a);this.constant=b;return this},setComponents:function(a,b,c,d){this.normal.set(a,b,c);this.constant=d;return 
this},setFromNormalAndCoplanarPoint:function(a,b){this.normal.copy(a);this.constant=-b.dot(this.normal);return this},setFromCoplanarPoints:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d,e){d=a.subVectors(e,d).cross(b.subVectors(c,d)).normalize();this.setFromNormalAndCoplanarPoint(d, -c);return this}}(),copy:function(a){this.normal.copy(a.normal);this.constant=a.constant;return this},normalize:function(){var a=1/this.normal.length();this.normal.multiplyScalar(a);this.constant*=a;return this},negate:function(){this.constant*=-1;this.normal.negate();return this},distanceToPoint:function(a){return this.normal.dot(a)+this.constant},distanceToSphere:function(a){return this.distanceToPoint(a.center)-a.radius},projectPoint:function(a,b){return this.orthoPoint(a,b).sub(a).negate()},orthoPoint:function(a, -b){var c=this.distanceToPoint(a);return(b||new THREE.Vector3).copy(this.normal).multiplyScalar(c)},isIntersectionLine:function(a){var b=this.distanceToPoint(a.start);a=this.distanceToPoint(a.end);return 0>b&&0a&&0f||1e;e++)8==e||13==e||18==e||23==e?b[e]="-":14==e?b[e]="4":(2>=c&&(c=33554432+16777216*Math.random()|0),d=c&15,c>>=4,b[e]=a[19==e?d&3|8:d]);return b.join("")}}(),clamp:function(a,b,c){return ac?c:a},clampBottom:function(a,b){return a=c)return 1;a=(a-b)/(c-b);return a*a*(3-2*a)},smootherstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*a*(a*(6*a-15)+10)},random16:function(){return(65280*Math.random()+255*Math.random())/65535},randInt:function(a,b){return a+Math.floor(Math.random()*(b-a+1))},randFloat:function(a,b){return a+Math.random()*(b-a)},randFloatSpread:function(a){return a*(.5-Math.random())},degToRad:function(){var a=Math.PI/180;return function(b){return b*a}}(),radToDeg:function(){var a= -180/Math.PI;return function(b){return b*a}}(),isPowerOfTwo:function(a){return 0===(a&a-1)&&0!==a}}; -THREE.Spline=function(a){function b(a,b,c,d,e,f,g){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*g+(-3*(b-c)-2*a-d)*f+a*e+b}this.points=a;var c=[],d={x:0,y:0,z:0},e,f,g,h,k,n,p,q,m;this.initFromArray=function(a){this.points=[];for(var b=0;bthis.points.length-2?this.points.length-1:f+1;c[3]=f>this.points.length-3?this.points.length-1:f+ -2;n=this.points[c[0]];p=this.points[c[1]];q=this.points[c[2]];m=this.points[c[3]];h=g*g;k=g*h;d.x=b(n.x,p.x,q.x,m.x,g,h,k);d.y=b(n.y,p.y,q.y,m.y,g,h,k);d.z=b(n.z,p.z,q.z,m.z,g,h,k);return d};this.getControlPointsArray=function(){var a,b,c=this.points.length,d=[];for(a=0;a=b.x+b.y}}(); -THREE.Triangle.prototype={constructor:THREE.Triangle,set:function(a,b,c){this.a.copy(a);this.b.copy(b);this.c.copy(c);return this},setFromPointsAndIndices:function(a,b,c,d){this.a.copy(a[b]);this.b.copy(a[c]);this.c.copy(a[d]);return this},copy:function(a){this.a.copy(a.a);this.b.copy(a.b);this.c.copy(a.c);return this},area:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){a.subVectors(this.c,this.b);b.subVectors(this.a,this.b);return.5*a.cross(b).length()}}(),midpoint:function(a){return(a|| -new THREE.Vector3).addVectors(this.a,this.b).add(this.c).multiplyScalar(1/3)},normal:function(a){return THREE.Triangle.normal(this.a,this.b,this.c,a)},plane:function(a){return(a||new THREE.Plane).setFromCoplanarPoints(this.a,this.b,this.c)},barycoordFromPoint:function(a,b){return THREE.Triangle.barycoordFromPoint(a,this.a,this.b,this.c,b)},containsPoint:function(a){return THREE.Triangle.containsPoint(a,this.a,this.b,this.c)},equals:function(a){return a.a.equals(this.a)&&a.b.equals(this.b)&&a.c.equals(this.c)}, 
-clone:function(){return(new THREE.Triangle).copy(this)}};THREE.Clock=function(a){this.autoStart=void 0!==a?a:!0;this.elapsedTime=this.oldTime=this.startTime=0;this.running=!1}; -THREE.Clock.prototype={constructor:THREE.Clock,start:function(){this.oldTime=this.startTime=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now();this.running=!0},stop:function(){this.getElapsedTime();this.running=!1},getElapsedTime:function(){this.getDelta();return this.elapsedTime},getDelta:function(){var a=0;this.autoStart&&!this.running&&this.start();if(this.running){var b=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now(), -a=.001*(b-this.oldTime);this.oldTime=b;this.elapsedTime+=a}return a}};THREE.EventDispatcher=function(){}; -THREE.EventDispatcher.prototype={constructor:THREE.EventDispatcher,apply:function(a){a.addEventListener=THREE.EventDispatcher.prototype.addEventListener;a.hasEventListener=THREE.EventDispatcher.prototype.hasEventListener;a.removeEventListener=THREE.EventDispatcher.prototype.removeEventListener;a.dispatchEvent=THREE.EventDispatcher.prototype.dispatchEvent},addEventListener:function(a,b){void 0===this._listeners&&(this._listeners={});var c=this._listeners;void 0===c[a]&&(c[a]=[]);-1===c[a].indexOf(b)&& -c[a].push(b)},hasEventListener:function(a,b){if(void 0===this._listeners)return!1;var c=this._listeners;return void 0!==c[a]&&-1!==c[a].indexOf(b)?!0:!1},removeEventListener:function(a,b){if(void 0!==this._listeners){var c=this._listeners[a];if(void 0!==c){var d=c.indexOf(b);-1!==d&&c.splice(d,1)}}},dispatchEvent:function(a){if(void 0!==this._listeners){var b=this._listeners[a.type];if(void 0!==b){a.target=this;for(var c=[],d=b.length,e=0;eza?-1:1;h[4*a]=la.x;h[4*a+1]=la.y;h[4*a+2]=la.z;h[4*a+3]=Ga}if(void 0===this.attributes.index||void 0===this.attributes.position||void 0===this.attributes.normal||void 0===this.attributes.uv)console.warn("Missing required attributes (index, position, normal or uv) in BufferGeometry.computeTangents()");else{var c=this.attributes.index.array,d=this.attributes.position.array, -e=this.attributes.normal.array,f=this.attributes.uv.array,g=d.length/3;void 0===this.attributes.tangent&&this.addAttribute("tangent",new THREE.BufferAttribute(new Float32Array(4*g),4));for(var h=this.attributes.tangent.array,k=[],n=[],p=0;ps;s++)t=a[3*c+s],-1==m[t]?(q[2*s]=t,q[2*s+1]=-1,p++):m[t]k.index+b)for(k={start:f,count:0,index:g},h.push(k),p=0;6>p;p+=2)s=q[p+1],-1p;p+=2)t=q[p],s=q[p+1],-1===s&&(s=g++),m[t]=s,r[s]=t,e[f++]=s-k.index,k.count++}this.reorderBuffers(e,r,g);return this.offsets=h},merge:function(){console.log("BufferGeometry.merge(): TODO")},normalizeNormals:function(){for(var a=this.attributes.normal.array,b,c,d,e=0,f=a.length;ed?-1:1,e.vertexTangents[c]=new THREE.Vector4(w.x,w.y,w.z,d);this.hasTangents=!0},computeLineDistances:function(){for(var a=0,b=this.vertices,c=0,d=b.length;cd;d++)if(e[d]==e[(d+1)%3]){a.push(f);break}for(f=a.length-1;0<=f;f--)for(e=a[f],this.faces.splice(e,1),c=0,g=this.faceVertexUvs.length;ca.opacity)h.transparent=a.transparent;void 0!==a.depthTest&&(h.depthTest=a.depthTest);void 0!==a.depthWrite&&(h.depthWrite=a.depthWrite);void 0!==a.visible&&(h.visible=a.visible);void 0!==a.flipSided&&(h.side=THREE.BackSide);void 0!==a.doubleSided&&(h.side=THREE.DoubleSide);void 0!==a.wireframe&&(h.wireframe=a.wireframe);void 0!==a.vertexColors&&("face"=== 
-a.vertexColors?h.vertexColors=THREE.FaceColors:a.vertexColors&&(h.vertexColors=THREE.VertexColors));a.colorDiffuse?h.color=e(a.colorDiffuse):a.DbgColor&&(h.color=a.DbgColor);a.colorSpecular&&(h.specular=e(a.colorSpecular));a.colorAmbient&&(h.ambient=e(a.colorAmbient));a.colorEmissive&&(h.emissive=e(a.colorEmissive));a.transparency&&(h.opacity=a.transparency);a.specularCoef&&(h.shininess=a.specularCoef);a.mapDiffuse&&b&&d(h,"map",a.mapDiffuse,a.mapDiffuseRepeat,a.mapDiffuseOffset,a.mapDiffuseWrap, -a.mapDiffuseAnisotropy);a.mapLight&&b&&d(h,"lightMap",a.mapLight,a.mapLightRepeat,a.mapLightOffset,a.mapLightWrap,a.mapLightAnisotropy);a.mapBump&&b&&d(h,"bumpMap",a.mapBump,a.mapBumpRepeat,a.mapBumpOffset,a.mapBumpWrap,a.mapBumpAnisotropy);a.mapNormal&&b&&d(h,"normalMap",a.mapNormal,a.mapNormalRepeat,a.mapNormalOffset,a.mapNormalWrap,a.mapNormalAnisotropy);a.mapSpecular&&b&&d(h,"specularMap",a.mapSpecular,a.mapSpecularRepeat,a.mapSpecularOffset,a.mapSpecularWrap,a.mapSpecularAnisotropy);a.mapAlpha&& -b&&d(h,"alphaMap",a.mapAlpha,a.mapAlphaRepeat,a.mapAlphaOffset,a.mapAlphaWrap,a.mapAlphaAnisotropy);a.mapBumpScale&&(h.bumpScale=a.mapBumpScale);a.mapNormal?(g=THREE.ShaderLib.normalmap,k=THREE.UniformsUtils.clone(g.uniforms),k.tNormal.value=h.normalMap,a.mapNormalFactor&&k.uNormalScale.value.set(a.mapNormalFactor,a.mapNormalFactor),h.map&&(k.tDiffuse.value=h.map,k.enableDiffuse.value=!0),h.specularMap&&(k.tSpecular.value=h.specularMap,k.enableSpecular.value=!0),h.lightMap&&(k.tAO.value=h.lightMap, -k.enableAO.value=!0),k.diffuse.value.setHex(h.color),k.specular.value.setHex(h.specular),k.ambient.value.setHex(h.ambient),k.shininess.value=h.shininess,void 0!==h.opacity&&(k.opacity.value=h.opacity),g=new THREE.ShaderMaterial({fragmentShader:g.fragmentShader,vertexShader:g.vertexShader,uniforms:k,lights:!0,fog:!0}),h.transparent&&(g.transparent=!0)):g=new THREE[g](h);void 0!==a.DbgName&&(g.name=a.DbgName);return g}}; -THREE.Loader.Handlers={handlers:[],add:function(a,b){this.handlers.push(a,b)},get:function(a){for(var b=0,c=this.handlers.length;bg;g++)m=y[k++],v=u[2*m],m=u[2*m+1],v=new THREE.Vector2(v,m),2!==g&&c.faceVertexUvs[d][h].push(v),0!==g&&c.faceVertexUvs[d][h+1].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]),s.normal.copy(r.normal));if(t)for(d=0;4>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++], -G[q++],G[q]),2!==d&&r.vertexNormals.push(t),0!==d&&s.vertexNormals.push(t);p&&(p=y[k++],p=w[p],r.color.setHex(p),s.color.setHex(p));if(b)for(d=0;4>d;d++)p=y[k++],p=w[p],2!==d&&r.vertexColors.push(new THREE.Color(p)),0!==d&&s.vertexColors.push(new THREE.Color(p));c.faces.push(r);c.faces.push(s)}else{r=new THREE.Face3;r.a=y[k++];r.b=y[k++];r.c=y[k++];h&&(h=y[k++],r.materialIndex=h);h=c.faces.length;if(d)for(d=0;dg;g++)m=y[k++],v=u[2*m],m=u[2*m+1], -v=new THREE.Vector2(v,m),c.faceVertexUvs[d][h].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]));if(t)for(d=0;3>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++],G[q++],G[q]),r.vertexNormals.push(t);p&&(p=y[k++],r.color.setHex(w[p]));if(b)for(d=0;3>d;d++)p=y[k++],r.vertexColors.push(new THREE.Color(w[p]));c.faces.push(r)}})(d);(function(){var b=void 0!==a.influencesPerVertex?a.influencesPerVertex:2;if(a.skinWeights)for(var d=0,g=a.skinWeights.length;dthis.opacity&&(a.opacity=this.opacity);!1!==this.transparent&&(a.transparent=this.transparent);!1!==this.wireframe&&(a.wireframe=this.wireframe);return a},clone:function(a){void 0===a&&(a=new 
THREE.Material);a.name=this.name;a.side=this.side;a.opacity=this.opacity;a.transparent=this.transparent;a.blending=this.blending;a.blendSrc=this.blendSrc;a.blendDst=this.blendDst;a.blendEquation=this.blendEquation;a.depthTest=this.depthTest;a.depthWrite=this.depthWrite;a.polygonOffset=this.polygonOffset;a.polygonOffsetFactor= -this.polygonOffsetFactor;a.polygonOffsetUnits=this.polygonOffsetUnits;a.alphaTest=this.alphaTest;a.overdraw=this.overdraw;a.visible=this.visible;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Material.prototype);THREE.MaterialIdCount=0; -THREE.LineBasicMaterial=function(a){THREE.Material.call(this);this.type="LineBasicMaterial";this.color=new THREE.Color(16777215);this.linewidth=1;this.linejoin=this.linecap="round";this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.LineBasicMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.LineBasicMaterial.prototype.clone=function(){var a=new THREE.LineBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.linecap=this.linecap;a.linejoin=this.linejoin;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; -THREE.LineDashedMaterial=function(a){THREE.Material.call(this);this.type="LineDashedMaterial";this.color=new THREE.Color(16777215);this.scale=this.linewidth=1;this.dashSize=3;this.gapSize=1;this.vertexColors=!1;this.fog=!0;this.setValues(a)};THREE.LineDashedMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.LineDashedMaterial.prototype.clone=function(){var a=new THREE.LineDashedMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.scale=this.scale;a.dashSize=this.dashSize;a.gapSize=this.gapSize;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; -THREE.MeshBasicMaterial=function(a){THREE.Material.call(this);this.type="MeshBasicMaterial";this.color=new THREE.Color(16777215);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphTargets=this.skinning=!1;this.setValues(a)}; -THREE.MeshBasicMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshBasicMaterial.prototype.clone=function(){var a=new THREE.MeshBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap; -a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;return a}; -THREE.MeshLambertMaterial=function(a){THREE.Material.call(this);this.type="MeshLambertMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.wrapAround=!1;this.wrapRGB=new 
THREE.Vector3(1,1,1);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth= -1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshLambertMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshLambertMaterial.prototype.clone=function(){var a=new THREE.MeshLambertMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog; -a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; -THREE.MeshPhongMaterial=function(a){THREE.Material.call(this);this.type="MeshPhongMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.specular=new THREE.Color(1118481);this.shininess=30;this.wrapAround=this.metal=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.bumpMap=this.lightMap=this.map=null;this.bumpScale=1;this.normalMap=null;this.normalScale=new THREE.Vector2(1,1);this.envMap=this.alphaMap=this.specularMap=null;this.combine= -THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshPhongMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshPhongMaterial.prototype.clone=function(){var a=new THREE.MeshPhongMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.specular.copy(this.specular);a.shininess=this.shininess;a.metal=this.metal;a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.bumpMap=this.bumpMap;a.bumpScale=this.bumpScale;a.normalMap=this.normalMap;a.normalScale.copy(this.normalScale); -a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; -THREE.MeshDepthMaterial=function(a){THREE.Material.call(this);this.type="MeshDepthMaterial";this.wireframe=this.morphTargets=!1;this.wireframeLinewidth=1;this.setValues(a)};THREE.MeshDepthMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshDepthMaterial.prototype.clone=function(){var a=new 
THREE.MeshDepthMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a}; -THREE.MeshNormalMaterial=function(a){THREE.Material.call(this,a);this.type="MeshNormalMaterial";this.shading=THREE.FlatShading;this.wireframe=!1;this.wireframeLinewidth=1;this.morphTargets=!1;this.setValues(a)};THREE.MeshNormalMaterial.prototype=Object.create(THREE.Material.prototype); -THREE.MeshNormalMaterial.prototype.clone=function(){var a=new THREE.MeshNormalMaterial;THREE.Material.prototype.clone.call(this,a);a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshFaceMaterial=function(a){this.uuid=THREE.Math.generateUUID();this.type="MeshFaceMaterial";this.materials=a instanceof Array?a:[]}; -THREE.MeshFaceMaterial.prototype={constructor:THREE.MeshFaceMaterial,toJSON:function(){for(var a={metadata:{version:4.2,type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type,materials:[]},b=0,c=this.materials.length;bf)){var m=b.origin.distanceTo(n);md.far||e.push({distance:m,point:k.clone().applyMatrix4(this.matrixWorld),face:null,faceIndex:null,object:this})}}}();THREE.Line.prototype.clone=function(a){void 0===a&&(a=new THREE.Line(this.geometry,this.material,this.mode));THREE.Object3D.prototype.clone.call(this,a);return a}; -THREE.Mesh=function(a,b){THREE.Object3D.call(this);this.type="Mesh";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.MeshBasicMaterial({color:16777215*Math.random()});this.updateMorphTargets()};THREE.Mesh.prototype=Object.create(THREE.Object3D.prototype); -THREE.Mesh.prototype.updateMorphTargets=function(){if(void 0!==this.geometry.morphTargets&&0g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this})}}}else for(s=p.position.array,t=k=0,w=s.length;k -g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this}))}}else if(k instanceof THREE.Geometry)for(t=this.material instanceof THREE.MeshFaceMaterial,s=!0===t?this.material.materials:null,r=g.precision,u=k.vertices,v=0,y=k.faces.length;vg.far||h.push({distance:x,point:K,face:G,faceIndex:v,object:this}))}}}();THREE.Mesh.prototype.clone=function(a,b){void 0===a&&(a=new THREE.Mesh(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a,b);return a};THREE.Bone=function(a){THREE.Object3D.call(this);this.skin=a};THREE.Bone.prototype=Object.create(THREE.Object3D.prototype); -THREE.Skeleton=function(a,b,c){this.useVertexTexture=void 0!==c?c:!0;this.identityMatrix=new THREE.Matrix4;a=a||[];this.bones=a.slice(0);this.useVertexTexture?(this.boneTextureHeight=this.boneTextureWidth=a=256h.end&&(h.end=e);b||(b=g)}}a.firstAnimation=b}; -THREE.MorphAnimMesh.prototype.setAnimationLabel=function(a,b,c){this.geometry.animations||(this.geometry.animations={});this.geometry.animations[a]={start:b,end:c}};THREE.MorphAnimMesh.prototype.playAnimation=function(a,b){var c=this.geometry.animations[a];c?(this.setFrameRange(c.start,c.end),this.duration=(c.end-c.start)/b*1E3,this.time=0):console.warn("animation["+a+"] undefined")}; -THREE.MorphAnimMesh.prototype.updateAnimation=function(a){var 
b=this.duration/this.length;this.time+=this.direction*a;if(this.mirroredLoop){if(this.time>this.duration||0>this.time)this.direction*=-1,this.time>this.duration&&(this.time=this.duration,this.directionBackwards=!0),0>this.time&&(this.time=0,this.directionBackwards=!1)}else this.time%=this.duration,0>this.time&&(this.time+=this.duration);a=this.startKeyframe+THREE.Math.clamp(Math.floor(this.time/b),0,this.length-1);a!==this.currentKeyframe&& -(this.morphTargetInfluences[this.lastKeyframe]=0,this.morphTargetInfluences[this.currentKeyframe]=1,this.morphTargetInfluences[a]=0,this.lastKeyframe=this.currentKeyframe,this.currentKeyframe=a);b=this.time%b/b;this.directionBackwards&&(b=1-b);this.morphTargetInfluences[this.currentKeyframe]=b;this.morphTargetInfluences[this.lastKeyframe]=1-b}; -THREE.MorphAnimMesh.prototype.interpolateTargets=function(a,b,c){for(var d=this.morphTargetInfluences,e=0,f=d.length;e=this.objects[d].distance)this.objects[d-1].object.visible=!1,this.objects[d].object.visible=!0;else break;for(;dthis.scale.x||c.push({distance:d,point:this.position,face:null,object:this})}}();THREE.Sprite.prototype.clone=function(a){void 0===a&&(a=new THREE.Sprite(this.material));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.Particle=THREE.Sprite; -THREE.LensFlare=function(a,b,c,d,e){THREE.Object3D.call(this);this.lensFlares=[];this.positionScreen=new THREE.Vector3;this.customUpdateCallback=void 0;void 0!==a&&this.add(a,b,c,d,e)};THREE.LensFlare.prototype=Object.create(THREE.Object3D.prototype); -THREE.LensFlare.prototype.add=function(a,b,c,d,e,f){void 0===b&&(b=-1);void 0===c&&(c=0);void 0===f&&(f=1);void 0===e&&(e=new THREE.Color(16777215));void 0===d&&(d=THREE.NormalBlending);c=Math.min(c,Math.max(0,c));this.lensFlares.push({texture:a,size:b,distance:c,x:0,y:0,z:0,scale:1,rotation:1,opacity:f,color:e,blending:d})}; -THREE.LensFlare.prototype.updateLensFlares=function(){var a,b=this.lensFlares.length,c,d=2*-this.positionScreen.x,e=2*-this.positionScreen.y;for(a=0;a dashSize ) {\n\t\tdiscard;\n\t}\n\tgl_FragColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.fog_fragment, -"}"].join("\n")},depth:{uniforms:{mNear:{type:"f",value:1},mFar:{type:"f",value:2E3},opacity:{type:"f",value:1}},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float mNear;\nuniform float mFar;\nuniform float opacity;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment, -"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tfloat depth = gl_FragDepthEXT / gl_FragCoord.w;\n\t#else\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\t#endif\n\tfloat color = 1.0 - smoothstep( mNear, mFar, depth );\n\tgl_FragColor = vec4( vec3( color ), opacity );\n}"].join("\n")},normal:{uniforms:{opacity:{type:"f",value:1}},vertexShader:["varying vec3 vNormal;",THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvNormal = normalize( normalMatrix * normal );", -THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float opacity;\nvarying vec3 vNormal;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = vec4( 0.5 * normalize( vNormal ) + 0.5, opacity 
);",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},normalmap:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{enableAO:{type:"i", -value:0},enableDiffuse:{type:"i",value:0},enableSpecular:{type:"i",value:0},enableReflection:{type:"i",value:0},enableDisplacement:{type:"i",value:0},tDisplacement:{type:"t",value:null},tDiffuse:{type:"t",value:null},tCube:{type:"t",value:null},tNormal:{type:"t",value:null},tSpecular:{type:"t",value:null},tAO:{type:"t",value:null},uNormalScale:{type:"v2",value:new THREE.Vector2(1,1)},uDisplacementBias:{type:"f",value:0},uDisplacementScale:{type:"f",value:1},diffuse:{type:"c",value:new THREE.Color(16777215)}, -specular:{type:"c",value:new THREE.Color(1118481)},ambient:{type:"c",value:new THREE.Color(16777215)},shininess:{type:"f",value:30},opacity:{type:"f",value:1},useRefract:{type:"i",value:0},refractionRatio:{type:"f",value:.98},reflectivity:{type:"f",value:.5},uOffset:{type:"v2",value:new THREE.Vector2(0,0)},uRepeat:{type:"v2",value:new THREE.Vector2(1,1)},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),fragmentShader:["uniform vec3 ambient;\nuniform vec3 diffuse;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\nuniform bool enableDiffuse;\nuniform bool enableSpecular;\nuniform bool enableAO;\nuniform bool enableReflection;\nuniform sampler2D tDiffuse;\nuniform sampler2D tNormal;\nuniform sampler2D tSpecular;\nuniform sampler2D tAO;\nuniform samplerCube tCube;\nuniform vec2 uNormalScale;\nuniform bool useRefract;\nuniform float refractionRatio;\nuniform float reflectivity;\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nuniform vec3 ambientLightColor;\n#if MAX_DIR_LIGHTS > 0\n\tuniform vec3 directionalLightColor[ MAX_DIR_LIGHTS ];\n\tuniform vec3 directionalLightDirection[ MAX_DIR_LIGHTS ];\n#endif\n#if MAX_HEMI_LIGHTS > 0\n\tuniform vec3 hemisphereLightSkyColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightGroundColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightDirection[ MAX_HEMI_LIGHTS ];\n#endif\n#if MAX_POINT_LIGHTS > 0\n\tuniform vec3 pointLightColor[ MAX_POINT_LIGHTS ];\n\tuniform vec3 pointLightPosition[ MAX_POINT_LIGHTS ];\n\tuniform float pointLightDistance[ MAX_POINT_LIGHTS ];\n#endif\n#if MAX_SPOT_LIGHTS > 0\n\tuniform vec3 spotLightColor[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightPosition[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightDirection[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightAngleCos[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightExponent[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightDistance[ MAX_SPOT_LIGHTS ];\n#endif\n#ifdef WRAP_AROUND\n\tuniform vec3 wrapRGB;\n#endif\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", -THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment,"\tgl_FragColor = vec4( vec3( 1.0 ), opacity );\n\tvec3 specularTex = vec3( 1.0 );\n\tvec3 normalTex = texture2D( tNormal, vUv ).xyz * 2.0 - 1.0;\n\tnormalTex.xy *= uNormalScale;\n\tnormalTex = normalize( normalTex );\n\tif( enableDiffuse ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 texelColor = texture2D( tDiffuse, vUv );\n\t\t\ttexelColor.xyz *= texelColor.xyz;\n\t\t\tgl_FragColor = gl_FragColor * texelColor;\n\t\t#else\n\t\t\tgl_FragColor = gl_FragColor * texture2D( tDiffuse, vUv );\n\t\t#endif\n\t}\n\tif( enableAO ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 aoColor = 
texture2D( tAO, vUv );\n\t\t\taoColor.xyz *= aoColor.xyz;\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * aoColor.xyz;\n\t\t#else\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * texture2D( tAO, vUv ).xyz;\n\t\t#endif\n\t}", -THREE.ShaderChunk.alphatest_fragment,"\tif( enableSpecular )\n\t\tspecularTex = texture2D( tSpecular, vUv ).xyz;\n\tmat3 tsb = mat3( normalize( vTangent ), normalize( vBinormal ), normalize( vNormal ) );\n\tvec3 finalNormal = tsb * normalTex;\n\t#ifdef FLIP_SIDED\n\t\tfinalNormal = -finalNormal;\n\t#endif\n\tvec3 normal = normalize( finalNormal );\n\tvec3 viewPosition = normalize( vViewPosition );\n\t#if MAX_POINT_LIGHTS > 0\n\t\tvec3 pointDiffuse = vec3( 0.0 );\n\t\tvec3 pointSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_POINT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( pointLightPosition[ i ], 1.0 );\n\t\t\tvec3 pointVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat pointDistance = 1.0;\n\t\t\tif ( pointLightDistance[ i ] > 0.0 )\n\t\t\t\tpointDistance = 1.0 - min( ( length( pointVector ) / pointLightDistance[ i ] ), 1.0 );\n\t\t\tpointVector = normalize( pointVector );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat pointDiffuseWeightFull = max( dot( normal, pointVector ), 0.0 );\n\t\t\t\tfloat pointDiffuseWeightHalf = max( 0.5 * dot( normal, pointVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 pointDiffuseWeight = mix( vec3( pointDiffuseWeightFull ), vec3( pointDiffuseWeightHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat pointDiffuseWeight = max( dot( normal, pointVector ), 0.0 );\n\t\t\t#endif\n\t\t\tpointDiffuse += pointDistance * pointLightColor[ i ] * diffuse * pointDiffuseWeight;\n\t\t\tvec3 pointHalfVector = normalize( pointVector + viewPosition );\n\t\t\tfloat pointDotNormalHalf = max( dot( normal, pointHalfVector ), 0.0 );\n\t\t\tfloat pointSpecularWeight = specularTex.r * max( pow( pointDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( pointVector, pointHalfVector ), 0.0 ), 5.0 );\n\t\t\tpointSpecular += schlick * pointLightColor[ i ] * pointSpecularWeight * pointDiffuseWeight * pointDistance * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\tvec3 spotDiffuse = vec3( 0.0 );\n\t\tvec3 spotSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_SPOT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( spotLightPosition[ i ], 1.0 );\n\t\t\tvec3 spotVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat spotDistance = 1.0;\n\t\t\tif ( spotLightDistance[ i ] > 0.0 )\n\t\t\t\tspotDistance = 1.0 - min( ( length( spotVector ) / spotLightDistance[ i ] ), 1.0 );\n\t\t\tspotVector = normalize( spotVector );\n\t\t\tfloat spotEffect = dot( spotLightDirection[ i ], normalize( spotLightPosition[ i ] - vWorldPosition ) );\n\t\t\tif ( spotEffect > spotLightAngleCos[ i ] ) {\n\t\t\t\tspotEffect = max( pow( max( spotEffect, 0.0 ), spotLightExponent[ i ] ), 0.0 );\n\t\t\t\t#ifdef WRAP_AROUND\n\t\t\t\t\tfloat spotDiffuseWeightFull = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t\tfloat spotDiffuseWeightHalf = max( 0.5 * dot( normal, spotVector ) + 0.5, 0.0 );\n\t\t\t\t\tvec3 spotDiffuseWeight = mix( vec3( spotDiffuseWeightFull ), vec3( spotDiffuseWeightHalf ), wrapRGB );\n\t\t\t\t#else\n\t\t\t\t\tfloat spotDiffuseWeight = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t#endif\n\t\t\t\tspotDiffuse += spotDistance * spotLightColor[ i ] * diffuse * spotDiffuseWeight * spotEffect;\n\t\t\t\tvec3 spotHalfVector = 
normalize( spotVector + viewPosition );\n\t\t\t\tfloat spotDotNormalHalf = max( dot( normal, spotHalfVector ), 0.0 );\n\t\t\t\tfloat spotSpecularWeight = specularTex.r * max( pow( spotDotNormalHalf, shininess ), 0.0 );\n\t\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( spotVector, spotHalfVector ), 0.0 ), 5.0 );\n\t\t\t\tspotSpecular += schlick * spotLightColor[ i ] * spotSpecularWeight * spotDiffuseWeight * spotDistance * specularNormalization * spotEffect;\n\t\t\t}\n\t\t}\n\t#endif\n\t#if MAX_DIR_LIGHTS > 0\n\t\tvec3 dirDiffuse = vec3( 0.0 );\n\t\tvec3 dirSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_DIR_LIGHTS; i++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( directionalLightDirection[ i ], 0.0 );\n\t\t\tvec3 dirVector = normalize( lDirection.xyz );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat directionalLightWeightingFull = max( dot( normal, dirVector ), 0.0 );\n\t\t\t\tfloat directionalLightWeightingHalf = max( 0.5 * dot( normal, dirVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 dirDiffuseWeight = mix( vec3( directionalLightWeightingFull ), vec3( directionalLightWeightingHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat dirDiffuseWeight = max( dot( normal, dirVector ), 0.0 );\n\t\t\t#endif\n\t\t\tdirDiffuse += directionalLightColor[ i ] * diffuse * dirDiffuseWeight;\n\t\t\tvec3 dirHalfVector = normalize( dirVector + viewPosition );\n\t\t\tfloat dirDotNormalHalf = max( dot( normal, dirHalfVector ), 0.0 );\n\t\t\tfloat dirSpecularWeight = specularTex.r * max( pow( dirDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( dirVector, dirHalfVector ), 0.0 ), 5.0 );\n\t\t\tdirSpecular += schlick * directionalLightColor[ i ] * dirSpecularWeight * dirDiffuseWeight * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\tvec3 hemiDiffuse = vec3( 0.0 );\n\t\tvec3 hemiSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_HEMI_LIGHTS; i ++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( hemisphereLightDirection[ i ], 0.0 );\n\t\t\tvec3 lVector = normalize( lDirection.xyz );\n\t\t\tfloat dotProduct = dot( normal, lVector );\n\t\t\tfloat hemiDiffuseWeight = 0.5 * dotProduct + 0.5;\n\t\t\tvec3 hemiColor = mix( hemisphereLightGroundColor[ i ], hemisphereLightSkyColor[ i ], hemiDiffuseWeight );\n\t\t\themiDiffuse += diffuse * hemiColor;\n\t\t\tvec3 hemiHalfVectorSky = normalize( lVector + viewPosition );\n\t\t\tfloat hemiDotNormalHalfSky = 0.5 * dot( normal, hemiHalfVectorSky ) + 0.5;\n\t\t\tfloat hemiSpecularWeightSky = specularTex.r * max( pow( max( hemiDotNormalHalfSky, 0.0 ), shininess ), 0.0 );\n\t\t\tvec3 lVectorGround = -lVector;\n\t\t\tvec3 hemiHalfVectorGround = normalize( lVectorGround + viewPosition );\n\t\t\tfloat hemiDotNormalHalfGround = 0.5 * dot( normal, hemiHalfVectorGround ) + 0.5;\n\t\t\tfloat hemiSpecularWeightGround = specularTex.r * max( pow( max( hemiDotNormalHalfGround, 0.0 ), shininess ), 0.0 );\n\t\t\tfloat dotProductGround = dot( normal, lVectorGround );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlickSky = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVector, hemiHalfVectorSky ), 0.0 ), 5.0 );\n\t\t\tvec3 schlickGround = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVectorGround, hemiHalfVectorGround ), 0.0 ), 5.0 );\n\t\t\themiSpecular += hemiColor * specularNormalization * ( schlickSky * 
hemiSpecularWeightSky * max( dotProduct, 0.0 ) + schlickGround * hemiSpecularWeightGround * max( dotProductGround, 0.0 ) );\n\t\t}\n\t#endif\n\tvec3 totalDiffuse = vec3( 0.0 );\n\tvec3 totalSpecular = vec3( 0.0 );\n\t#if MAX_DIR_LIGHTS > 0\n\t\ttotalDiffuse += dirDiffuse;\n\t\ttotalSpecular += dirSpecular;\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\ttotalDiffuse += hemiDiffuse;\n\t\ttotalSpecular += hemiSpecular;\n\t#endif\n\t#if MAX_POINT_LIGHTS > 0\n\t\ttotalDiffuse += pointDiffuse;\n\t\ttotalSpecular += pointSpecular;\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\ttotalDiffuse += spotDiffuse;\n\t\ttotalSpecular += spotSpecular;\n\t#endif\n\t#ifdef METAL\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient + totalSpecular );\n\t#else\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient ) + totalSpecular;\n\t#endif\n\tif ( enableReflection ) {\n\t\tvec3 vReflect;\n\t\tvec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );\n\t\tif ( useRefract ) {\n\t\t\tvReflect = refract( cameraToVertex, normal, refractionRatio );\n\t\t} else {\n\t\t\tvReflect = reflect( cameraToVertex, normal );\n\t\t}\n\t\tvec4 cubeColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );\n\t\t#ifdef GAMMA_INPUT\n\t\t\tcubeColor.xyz *= cubeColor.xyz;\n\t\t#endif\n\t\tgl_FragColor.xyz = mix( gl_FragColor.xyz, cubeColor.xyz, specularTex.r * reflectivity );\n\t}", -THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fragment,"}"].join("\n"),vertexShader:["attribute vec4 tangent;\nuniform vec2 uOffset;\nuniform vec2 uRepeat;\nuniform bool enableDisplacement;\n#ifdef VERTEX_TEXTURES\n\tuniform sampler2D tDisplacement;\n\tuniform float uDisplacementScale;\n\tuniform float uDisplacementBias;\n#endif\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", -THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.skinnormal_vertex,"\t#ifdef USE_SKINNING\n\t\tvNormal = normalize( normalMatrix * skinnedNormal.xyz );\n\t\tvec4 skinnedTangent = skinMatrix * vec4( tangent.xyz, 0.0 );\n\t\tvTangent = normalize( normalMatrix * skinnedTangent.xyz );\n\t#else\n\t\tvNormal = normalize( normalMatrix * normal );\n\t\tvTangent = normalize( normalMatrix * tangent.xyz );\n\t#endif\n\tvBinormal = normalize( cross( vNormal, vTangent ) * tangent.w );\n\tvUv = uv * uRepeat + uOffset;\n\tvec3 displacedPosition;\n\t#ifdef VERTEX_TEXTURES\n\t\tif ( enableDisplacement ) {\n\t\t\tvec3 dv = texture2D( tDisplacement, uv ).xyz;\n\t\t\tfloat df = uDisplacementScale * dv.x + uDisplacementBias;\n\t\t\tdisplacedPosition = position + normalize( normal ) * df;\n\t\t} else {\n\t\t\t#ifdef USE_SKINNING\n\t\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\t\tdisplacedPosition = skinned.xyz;\n\t\t\t#else\n\t\t\t\tdisplacedPosition = position;\n\t\t\t#endif\n\t\t}\n\t#else\n\t\t#ifdef USE_SKINNING\n\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\tvec4 skinned = vec4( 
0.0 );\n\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\tdisplacedPosition = skinned.xyz;\n\t\t#else\n\t\t\tdisplacedPosition = position;\n\t\t#endif\n\t#endif\n\tvec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );\n\tvec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );\n\tgl_Position = projectionMatrix * mvPosition;", -THREE.ShaderChunk.logdepthbuf_vertex,"\tvWorldPosition = worldPosition.xyz;\n\tvViewPosition = -mvPosition.xyz;\n\t#ifdef USE_SHADOWMAP\n\t\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\t\t\tvShadowCoord[ i ] = shadowMatrix[ i ] * worldPosition;\n\t\t}\n\t#endif\n}"].join("\n")},cube:{uniforms:{tCube:{type:"t",value:null},tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvec4 worldPosition = modelMatrix * vec4( position, 1.0 );\n\tvWorldPosition = worldPosition.xyz;\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", -THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform samplerCube tCube;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},depthRGBA:{uniforms:{},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex, -"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:[THREE.ShaderChunk.logdepthbuf_pars_fragment,"vec4 pack_depth( const in float depth ) {\n\tconst vec4 bit_shift = vec4( 256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0 );\n\tconst vec4 bit_mask = vec4( 0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0 );\n\tvec4 res = mod( depth * bit_shift * vec4( 255 ), vec4( 256 ) ) / vec4( 255 );\n\tres -= res.xxyz * bit_mask;\n\treturn res;\n}\nvoid main() {", -THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragDepthEXT );\n\t#else\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );\n\t#endif\n}"].join("\n")}}; -THREE.WebGLRenderer=function(a){function b(a){var b=a.geometry;a=a.material;var c=b.vertices.length;if(a.attributes){void 0===b.__webglCustomAttributesList&&(b.__webglCustomAttributesList=[]);for(var d in a.attributes){var e=a.attributes[d];if(!e.__webglInitialized||e.createUniqueBuffers){e.__webglInitialized=!0;var f=1;"v2"===e.type?f=2:"v3"===e.type?f=3:"v4"===e.type?f=4:"c"===e.type&&(f=3);e.size=f;e.array=new Float32Array(c*f);e.buffer=l.createBuffer();e.buffer.belongsToAttribute=d;e.needsUpdate= -!0}b.__webglCustomAttributesList.push(e)}}}function c(a,b){var c=b.geometry,e=a.faces3,f=3*e.length,g=1*e.length,h=3*e.length,e=d(b,a);a.__vertexArray=new Float32Array(3*f);a.__normalArray=new Float32Array(3*f);a.__colorArray=new Float32Array(3*f);a.__uvArray=new Float32Array(2*f);1Aa;Aa++)Cb=ma[Aa],Ta[Sa]=Cb.x,Ta[Sa+1]=Cb.y,Ta[Sa+2]=Cb.z,Sa+=3;else 
for(Aa=0;3>Aa;Aa++)Ta[Sa]=pa.x,Ta[Sa+1]=pa.y,Ta[Sa+2]=pa.z,Sa+=3;l.bindBuffer(l.ARRAY_BUFFER,C.__webglNormalBuffer);l.bufferData(l.ARRAY_BUFFER, -Ta,S)}if(Kc&&ua){M=0;for(ea=N.length;MAa;Aa++)Oa=hb[Aa],sb[qb]=Oa.x,sb[qb+1]=Oa.y,qb+=2;0Aa;Aa++)Qb=za[Aa],fb[rb]=Qb.x,fb[rb+1]=Qb.y,rb+=2;0h&&(f[v].counter+=1,k=f[v].hash+"_"+f[v].counter,k in r||(p={id:rc++, -faces3:[],materialIndex:v,vertices:0,numMorphTargets:m,numMorphNormals:n},r[k]=p,q.push(p)));r[k].faces3.push(t);r[k].vertices+=3}a[g]=q;d.groupsNeedUpdate=!1}a=xb[d.id];g=0;for(e=a.length;gDa;Da++)kb[Da]=!J.autoScaleCubemaps||Ob||Tb?Tb?ua.image[Da].image:ua.image[Da]:R(ua.image[Da],$c);var ka=kb[0],Zb=THREE.Math.isPowerOfTwo(ka.width)&&THREE.Math.isPowerOfTwo(ka.height),ab=Q(ua.format),Fb=Q(ua.type);F(l.TEXTURE_CUBE_MAP,ua,Zb);for(Da=0;6>Da;Da++)if(Ob)for(var gb,$b=kb[Da].mipmaps,ga=0,Xb=$b.length;ga=Oc&&console.warn("WebGLRenderer: trying to use "+a+" texture units while this GPU supports only "+ -Oc);dc+=1;return a}function x(a,b){a._modelViewMatrix.multiplyMatrices(b.matrixWorldInverse,a.matrixWorld);a._normalMatrix.getNormalMatrix(a._modelViewMatrix)}function D(a,b,c,d){a[b]=c.r*c.r*d;a[b+1]=c.g*c.g*d;a[b+2]=c.b*c.b*d}function E(a,b,c,d){a[b]=c.r*d;a[b+1]=c.g*d;a[b+2]=c.b*d}function A(a){a!==Pc&&(l.lineWidth(a),Pc=a)}function B(a,b,c){Qc!==a&&(a?l.enable(l.POLYGON_OFFSET_FILL):l.disable(l.POLYGON_OFFSET_FILL),Qc=a);!a||Rc===b&&Sc===c||(l.polygonOffset(b,c),Rc=b,Sc=c)}function F(a,b,c){c? -(l.texParameteri(a,l.TEXTURE_WRAP_S,Q(b.wrapS)),l.texParameteri(a,l.TEXTURE_WRAP_T,Q(b.wrapT)),l.texParameteri(a,l.TEXTURE_MAG_FILTER,Q(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,Q(b.minFilter))):(l.texParameteri(a,l.TEXTURE_WRAP_S,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_WRAP_T,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_MAG_FILTER,T(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,T(b.minFilter)));(c=pa.get("EXT_texture_filter_anisotropic"))&&b.type!==THREE.FloatType&&(1b||a.height>b){var c=b/Math.max(a.width,a.height),d=document.createElement("canvas");d.width=Math.floor(a.width*c);d.height=Math.floor(a.height*c);d.getContext("2d").drawImage(a,0,0,a.width,a.height,0,0,d.width,d.height);console.log("THREE.WebGLRenderer:",a,"is too big ("+a.width+"x"+a.height+"). 
Resized to "+d.width+"x"+d.height+ -".");return d}return a}function H(a,b){l.bindRenderbuffer(l.RENDERBUFFER,a);b.depthBuffer&&!b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_COMPONENT16,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT,l.RENDERBUFFER,a)):b.depthBuffer&&b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_STENCIL,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a)):l.renderbufferStorage(l.RENDERBUFFER,l.RGBA4,b.width, -b.height)}function C(a){a instanceof THREE.WebGLRenderTargetCube?(l.bindTexture(l.TEXTURE_CUBE_MAP,a.__webglTexture),l.generateMipmap(l.TEXTURE_CUBE_MAP),l.bindTexture(l.TEXTURE_CUBE_MAP,null)):(l.bindTexture(l.TEXTURE_2D,a.__webglTexture),l.generateMipmap(l.TEXTURE_2D),l.bindTexture(l.TEXTURE_2D,null))}function T(a){return a===THREE.NearestFilter||a===THREE.NearestMipMapNearestFilter||a===THREE.NearestMipMapLinearFilter?l.NEAREST:l.LINEAR}function Q(a){var b;if(a===THREE.RepeatWrapping)return l.REPEAT; -if(a===THREE.ClampToEdgeWrapping)return l.CLAMP_TO_EDGE;if(a===THREE.MirroredRepeatWrapping)return l.MIRRORED_REPEAT;if(a===THREE.NearestFilter)return l.NEAREST;if(a===THREE.NearestMipMapNearestFilter)return l.NEAREST_MIPMAP_NEAREST;if(a===THREE.NearestMipMapLinearFilter)return l.NEAREST_MIPMAP_LINEAR;if(a===THREE.LinearFilter)return l.LINEAR;if(a===THREE.LinearMipMapNearestFilter)return l.LINEAR_MIPMAP_NEAREST;if(a===THREE.LinearMipMapLinearFilter)return l.LINEAR_MIPMAP_LINEAR;if(a===THREE.UnsignedByteType)return l.UNSIGNED_BYTE; -if(a===THREE.UnsignedShort4444Type)return l.UNSIGNED_SHORT_4_4_4_4;if(a===THREE.UnsignedShort5551Type)return l.UNSIGNED_SHORT_5_5_5_1;if(a===THREE.UnsignedShort565Type)return l.UNSIGNED_SHORT_5_6_5;if(a===THREE.ByteType)return l.BYTE;if(a===THREE.ShortType)return l.SHORT;if(a===THREE.UnsignedShortType)return l.UNSIGNED_SHORT;if(a===THREE.IntType)return l.INT;if(a===THREE.UnsignedIntType)return l.UNSIGNED_INT;if(a===THREE.FloatType)return l.FLOAT;if(a===THREE.AlphaFormat)return l.ALPHA;if(a===THREE.RGBFormat)return l.RGB; -if(a===THREE.RGBAFormat)return l.RGBA;if(a===THREE.LuminanceFormat)return l.LUMINANCE;if(a===THREE.LuminanceAlphaFormat)return l.LUMINANCE_ALPHA;if(a===THREE.AddEquation)return l.FUNC_ADD;if(a===THREE.SubtractEquation)return l.FUNC_SUBTRACT;if(a===THREE.ReverseSubtractEquation)return l.FUNC_REVERSE_SUBTRACT;if(a===THREE.ZeroFactor)return l.ZERO;if(a===THREE.OneFactor)return l.ONE;if(a===THREE.SrcColorFactor)return l.SRC_COLOR;if(a===THREE.OneMinusSrcColorFactor)return l.ONE_MINUS_SRC_COLOR;if(a=== -THREE.SrcAlphaFactor)return l.SRC_ALPHA;if(a===THREE.OneMinusSrcAlphaFactor)return l.ONE_MINUS_SRC_ALPHA;if(a===THREE.DstAlphaFactor)return l.DST_ALPHA;if(a===THREE.OneMinusDstAlphaFactor)return l.ONE_MINUS_DST_ALPHA;if(a===THREE.DstColorFactor)return l.DST_COLOR;if(a===THREE.OneMinusDstColorFactor)return l.ONE_MINUS_DST_COLOR;if(a===THREE.SrcAlphaSaturateFactor)return l.SRC_ALPHA_SATURATE;b=pa.get("WEBGL_compressed_texture_s3tc");if(null!==b){if(a===THREE.RGB_S3TC_DXT1_Format)return b.COMPRESSED_RGB_S3TC_DXT1_EXT; -if(a===THREE.RGBA_S3TC_DXT1_Format)return b.COMPRESSED_RGBA_S3TC_DXT1_EXT;if(a===THREE.RGBA_S3TC_DXT3_Format)return b.COMPRESSED_RGBA_S3TC_DXT3_EXT;if(a===THREE.RGBA_S3TC_DXT5_Format)return b.COMPRESSED_RGBA_S3TC_DXT5_EXT}b=pa.get("WEBGL_compressed_texture_pvrtc");if(null!==b){if(a===THREE.RGB_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;if(a===THREE.RGB_PVRTC_2BPPV1_Format)return 
b.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;if(a===THREE.RGBA_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG; -if(a===THREE.RGBA_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG}b=pa.get("EXT_blend_minmax");if(null!==b){if(a===THREE.MinEquation)return b.MIN_EXT;if(a===THREE.MaxEquation)return b.MAX_EXT}return 0}console.log("THREE.WebGLRenderer",THREE.REVISION);a=a||{};var O=void 0!==a.canvas?a.canvas:document.createElement("canvas"),S=void 0!==a.context?a.context:null,X=void 0!==a.precision?a.precision:"highp",Y=void 0!==a.alpha?a.alpha:!1,la=void 0!==a.depth?a.depth:!0,ma=void 0!==a.stencil? -a.stencil:!0,ya=void 0!==a.antialias?a.antialias:!1,P=void 0!==a.premultipliedAlpha?a.premultipliedAlpha:!0,Ga=void 0!==a.preserveDrawingBuffer?a.preserveDrawingBuffer:!1,Fa=void 0!==a.logarithmicDepthBuffer?a.logarithmicDepthBuffer:!1,za=new THREE.Color(0),bb=0,cb=[],ob={},jb=[],Jb=[],Ib=[],yb=[],Ra=[];this.domElement=O;this.context=null;this.devicePixelRatio=void 0!==a.devicePixelRatio?a.devicePixelRatio:void 0!==self.devicePixelRatio?self.devicePixelRatio:1;this.sortObjects=this.autoClearStencil= -this.autoClearDepth=this.autoClearColor=this.autoClear=!0;this.shadowMapEnabled=this.gammaOutput=this.gammaInput=!1;this.shadowMapType=THREE.PCFShadowMap;this.shadowMapCullFace=THREE.CullFaceFront;this.shadowMapCascade=this.shadowMapDebug=!1;this.maxMorphTargets=8;this.maxMorphNormals=4;this.autoScaleCubemaps=!0;this.info={memory:{programs:0,geometries:0,textures:0},render:{calls:0,vertices:0,faces:0,points:0}};var J=this,hb=[],tc=null,Tc=null,Kb=-1,Oa=-1,ec=null,dc=0,Lb=-1,Mb=-1,pb=-1,Nb=-1,Ob=-1, -Xb=-1,Yb=-1,nb=-1,Qc=null,Rc=null,Sc=null,Pc=null,Pb=0,kc=0,lc=O.width,mc=O.height,Uc=0,Vc=0,wb=new Uint8Array(16),ib=new Uint8Array(16),Ec=new THREE.Frustum,Ac=new THREE.Matrix4,Gc=new THREE.Matrix4,Na=new THREE.Vector3,sa=new THREE.Vector3,fc=!0,Mc={ambient:[0,0,0],directional:{length:0,colors:[],positions:[]},point:{length:0,colors:[],positions:[],distances:[]},spot:{length:0,colors:[],positions:[],distances:[],directions:[],anglesCos:[],exponents:[]},hemi:{length:0,skyColors:[],groundColors:[], -positions:[]}},l;try{var Wc={alpha:Y,depth:la,stencil:ma,antialias:ya,premultipliedAlpha:P,preserveDrawingBuffer:Ga};l=S||O.getContext("webgl",Wc)||O.getContext("experimental-webgl",Wc);if(null===l){if(null!==O.getContext("webgl"))throw"Error creating WebGL context with your selected attributes.";throw"Error creating WebGL context.";}}catch(ad){console.error(ad)}void 0===l.getShaderPrecisionFormat&&(l.getShaderPrecisionFormat=function(){return{rangeMin:1,rangeMax:1,precision:1}});var pa=new THREE.WebGLExtensions(l); -pa.get("OES_texture_float");pa.get("OES_texture_float_linear");pa.get("OES_standard_derivatives");Fa&&pa.get("EXT_frag_depth");l.clearColor(0,0,0,1);l.clearDepth(1);l.clearStencil(0);l.enable(l.DEPTH_TEST);l.depthFunc(l.LEQUAL);l.frontFace(l.CCW);l.cullFace(l.BACK);l.enable(l.CULL_FACE);l.enable(l.BLEND);l.blendEquation(l.FUNC_ADD);l.blendFunc(l.SRC_ALPHA,l.ONE_MINUS_SRC_ALPHA);l.viewport(Pb,kc,lc,mc);l.clearColor(za.r,za.g,za.b,bb);this.context=l;var Oc=l.getParameter(l.MAX_TEXTURE_IMAGE_UNITS), -bd=l.getParameter(l.MAX_VERTEX_TEXTURE_IMAGE_UNITS),cd=l.getParameter(l.MAX_TEXTURE_SIZE),$c=l.getParameter(l.MAX_CUBE_MAP_TEXTURE_SIZE),sc=0b;b++)l.deleteFramebuffer(a.__webglFramebuffer[b]),l.deleteRenderbuffer(a.__webglRenderbuffer[b]); -else l.deleteFramebuffer(a.__webglFramebuffer),l.deleteRenderbuffer(a.__webglRenderbuffer);delete a.__webglFramebuffer;delete 
a.__webglRenderbuffer}J.info.memory.textures--},Dc=function(a){a=a.target;a.removeEventListener("dispose",Dc);Cc(a)},Yc=function(a){for(var b="__webglVertexBuffer __webglNormalBuffer __webglTangentBuffer __webglColorBuffer __webglUVBuffer __webglUV2Buffer __webglSkinIndicesBuffer __webglSkinWeightsBuffer __webglFaceBuffer __webglLineBuffer __webglLineDistanceBuffer".split(" "), -c=0,d=b.length;cd.numSupportedMorphTargets?(n.sort(p),n.length=d.numSupportedMorphTargets):n.length>d.numSupportedMorphNormals?n.sort(p):0===n.length&&n.push([0, -0]);for(m=0;mf;f++){a.__webglFramebuffer[f]=l.createFramebuffer();a.__webglRenderbuffer[f]=l.createRenderbuffer();l.texImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X+f,0,d,a.width,a.height,0,d,e,null);var g=a,h=l.TEXTURE_CUBE_MAP_POSITIVE_X+f;l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer[f]);l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,h,g.__webglTexture,0);H(a.__webglRenderbuffer[f],a)}c&&l.generateMipmap(l.TEXTURE_CUBE_MAP)}else a.__webglFramebuffer= -l.createFramebuffer(),a.__webglRenderbuffer=a.shareDepthFrom?a.shareDepthFrom.__webglRenderbuffer:l.createRenderbuffer(),l.bindTexture(l.TEXTURE_2D,a.__webglTexture),F(l.TEXTURE_2D,a,c),l.texImage2D(l.TEXTURE_2D,0,d,a.width,a.height,0,d,e,null),d=l.TEXTURE_2D,l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer),l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,d,a.__webglTexture,0),a.shareDepthFrom?a.depthBuffer&&!a.stencilBuffer?l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT, -l.RENDERBUFFER,a.__webglRenderbuffer):a.depthBuffer&&a.stencilBuffer&&l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a.__webglRenderbuffer):H(a.__webglRenderbuffer,a),c&&l.generateMipmap(l.TEXTURE_2D);b?l.bindTexture(l.TEXTURE_CUBE_MAP,null):l.bindTexture(l.TEXTURE_2D,null);l.bindRenderbuffer(l.RENDERBUFFER,null);l.bindFramebuffer(l.FRAMEBUFFER,null)}a?(b=b?a.__webglFramebuffer[a.activeCubeFace]:a.__webglFramebuffer,c=a.width,a=a.height,e=d=0):(b=null,c=lc,a=mc, -d=Pb,e=kc);b!==Tc&&(l.bindFramebuffer(l.FRAMEBUFFER,b),l.viewport(d,e,c,a),Tc=b);Uc=c;Vc=a};this.initMaterial=function(){console.warn("THREE.WebGLRenderer: .initMaterial() has been removed.")};this.addPrePlugin=function(){console.warn("THREE.WebGLRenderer: .addPrePlugin() has been removed.")};this.addPostPlugin=function(){console.warn("THREE.WebGLRenderer: .addPostPlugin() has been removed.")};this.updateShadowMap=function(){console.warn("THREE.WebGLRenderer: .updateShadowMap() has been removed.")}}; -THREE.WebGLRenderTarget=function(a,b,c){this.width=a;this.height=b;c=c||{};this.wrapS=void 0!==c.wrapS?c.wrapS:THREE.ClampToEdgeWrapping;this.wrapT=void 0!==c.wrapT?c.wrapT:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==c.magFilter?c.magFilter:THREE.LinearFilter;this.minFilter=void 0!==c.minFilter?c.minFilter:THREE.LinearMipMapLinearFilter;this.anisotropy=void 0!==c.anisotropy?c.anisotropy:1;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.format=void 0!==c.format?c.format: -THREE.RGBAFormat;this.type=void 0!==c.type?c.type:THREE.UnsignedByteType;this.depthBuffer=void 0!==c.depthBuffer?c.depthBuffer:!0;this.stencilBuffer=void 0!==c.stencilBuffer?c.stencilBuffer:!0;this.generateMipmaps=!0;this.shareDepthFrom=null}; -THREE.WebGLRenderTarget.prototype={constructor:THREE.WebGLRenderTarget,setSize:function(a,b){this.width=a;this.height=b},clone:function(){var a=new 
THREE.WebGLRenderTarget(this.width,this.height);a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.offset.copy(this.offset);a.repeat.copy(this.repeat);a.format=this.format;a.type=this.type;a.depthBuffer=this.depthBuffer;a.stencilBuffer=this.stencilBuffer;a.generateMipmaps=this.generateMipmaps; -a.shareDepthFrom=this.shareDepthFrom;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube=function(a,b,c){THREE.WebGLRenderTarget.call(this,a,b,c);this.activeCubeFace=0};THREE.WebGLRenderTargetCube.prototype=Object.create(THREE.WebGLRenderTarget.prototype); -THREE.WebGLExtensions=function(a){var b={};this.get=function(c){if(void 0!==b[c])return b[c];var d;switch(c){case "OES_texture_float":d=a.getExtension("OES_texture_float");break;case "OES_texture_float_linear":d=a.getExtension("OES_texture_float_linear");break;case "OES_standard_derivatives":d=a.getExtension("OES_standard_derivatives");break;case "EXT_texture_filter_anisotropic":d=a.getExtension("EXT_texture_filter_anisotropic")||a.getExtension("MOZ_EXT_texture_filter_anisotropic")||a.getExtension("WEBKIT_EXT_texture_filter_anisotropic"); -break;case "WEBGL_compressed_texture_s3tc":d=a.getExtension("WEBGL_compressed_texture_s3tc")||a.getExtension("MOZ_WEBGL_compressed_texture_s3tc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc");break;case "WEBGL_compressed_texture_pvrtc":d=a.getExtension("WEBGL_compressed_texture_pvrtc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc");break;case "OES_element_index_uint":d=a.getExtension("OES_element_index_uint");break;case "EXT_blend_minmax":d=a.getExtension("EXT_blend_minmax");break; -case "EXT_frag_depth":d=a.getExtension("EXT_frag_depth")}null===d&&console.log("THREE.WebGLRenderer: "+c+" extension not supported.");return b[c]=d}}; -THREE.WebGLProgram=function(){var a=0;return function(b,c,d,e){var f=b.context,g=d.defines,h=d.__webglShader.uniforms,k=d.attributes,n=d.__webglShader.vertexShader,p=d.__webglShader.fragmentShader,q=d.index0AttributeName;void 0===q&&!0===e.morphTargets&&(q="position");var m="SHADOWMAP_TYPE_BASIC";e.shadowMapType===THREE.PCFShadowMap?m="SHADOWMAP_TYPE_PCF":e.shadowMapType===THREE.PCFSoftShadowMap&&(m="SHADOWMAP_TYPE_PCF_SOFT");var r,t;r=[];for(var s in g)t=g[s],!1!==t&&(t="#define "+s+" "+t,r.push(t)); -r=r.join("\n");g=f.createProgram();d instanceof THREE.RawShaderMaterial?b=d="":(d=["precision "+e.precision+" float;","precision "+e.precision+" int;",r,e.supportsVertexTextures?"#define VERTEX_TEXTURES":"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"","#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,"#define MAX_BONES "+ -e.maxBones,e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.skinning?"#define USE_SKINNING":"",e.useVertexTexture?"#define BONE_TEXTURE":"",e.morphTargets?"#define USE_MORPHTARGETS":"",e.morphNormals?"#define USE_MORPHNORMALS":"",e.wrapAround?"#define WRAP_AROUND": -"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define 
FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.sizeAttenuation?"#define USE_SIZEATTENUATION":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 modelMatrix;\nuniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat4 viewMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\nattribute vec3 position;\nattribute vec3 normal;\nattribute vec2 uv;\nattribute vec2 uv2;\n#ifdef USE_COLOR\n\tattribute vec3 color;\n#endif\n#ifdef USE_MORPHTARGETS\n\tattribute vec3 morphTarget0;\n\tattribute vec3 morphTarget1;\n\tattribute vec3 morphTarget2;\n\tattribute vec3 morphTarget3;\n\t#ifdef USE_MORPHNORMALS\n\t\tattribute vec3 morphNormal0;\n\t\tattribute vec3 morphNormal1;\n\t\tattribute vec3 morphNormal2;\n\t\tattribute vec3 morphNormal3;\n\t#else\n\t\tattribute vec3 morphTarget4;\n\t\tattribute vec3 morphTarget5;\n\t\tattribute vec3 morphTarget6;\n\t\tattribute vec3 morphTarget7;\n\t#endif\n#endif\n#ifdef USE_SKINNING\n\tattribute vec4 skinIndex;\n\tattribute vec4 skinWeight;\n#endif\n"].join("\n"), -b=["precision "+e.precision+" float;","precision "+e.precision+" int;",e.bumpMap||e.normalMap?"#extension GL_OES_standard_derivatives : enable":"",r,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,e.alphaTest?"#define ALPHATEST "+e.alphaTest:"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"",e.useFog&&e.fog?"#define USE_FOG": -"",e.useFog&&e.fogExp?"#define FOG_EXP2":"",e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.metal?"#define METAL":"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP": -"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 viewMatrix;\nuniform vec3 cameraPosition;\n"].join("\n"));n=new THREE.WebGLShader(f,f.VERTEX_SHADER,d+n);p=new THREE.WebGLShader(f,f.FRAGMENT_SHADER,b+p);f.attachShader(g,n);f.attachShader(g,p);void 0!==q&&f.bindAttribLocation(g,0,q);f.linkProgram(g);!1===f.getProgramParameter(g,f.LINK_STATUS)&&(console.error("THREE.WebGLProgram: Could not initialise shader."), -console.error("gl.VALIDATE_STATUS",f.getProgramParameter(g,f.VALIDATE_STATUS)),console.error("gl.getError()",f.getError()));""!==f.getProgramInfoLog(g)&&console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",f.getProgramInfoLog(g));f.deleteShader(n);f.deleteShader(p);q="viewMatrix modelViewMatrix projectionMatrix normalMatrix modelMatrix cameraPosition morphTargetInfluences bindMatrix bindMatrixInverse".split(" ");e.useVertexTexture?(q.push("boneTexture"),q.push("boneTextureWidth"),q.push("boneTextureHeight")): -q.push("boneGlobalMatrices");e.logarithmicDepthBuffer&&q.push("logDepthBufFC");for(var u in h)q.push(u);h=q;u={};q=0;for(b=h.length;qT;T++)F[T]=new THREE.Vector3,A[T]=new 
THREE.Vector3;F=B.shadowCascadeNearZ[C];B=B.shadowCascadeFarZ[C];A[0].set(-1,-1,F);A[1].set(1,-1,F);A[2].set(-1,1,F);A[3].set(1,1,F);A[4].set(-1,-1,B);A[5].set(1,-1,B);A[6].set(-1,1,B);A[7].set(1,1,B);H.originalCamera=v;A=new THREE.Gyroscope;A.position.copy(x.shadowCascadeOffset);A.add(H);A.add(H.target);v.add(A);x.shadowCascadeArray[E]=H;console.log("Created virtualLight",H)}C= -x;F=E;B=C.shadowCascadeArray[F];B.position.copy(C.position);B.target.position.copy(C.target.position);B.lookAt(B.target);B.shadowCameraVisible=C.shadowCameraVisible;B.shadowDarkness=C.shadowDarkness;B.shadowBias=C.shadowCascadeBias[F];A=C.shadowCascadeNearZ[F];C=C.shadowCascadeFarZ[F];B=B.pointsFrustum;B[0].z=A;B[1].z=A;B[2].z=A;B[3].z=A;B[4].z=C;B[5].z=C;B[6].z=C;B[7].z=C;R[D]=H;D++}else R[D]=x,D++;u=0;for(K=R.length;uC;C++)F=B[C],F.copy(A[C]),F.unproject(E),F.applyMatrix4(D.matrixWorldInverse),F.xr.x&&(r.x=F.x),F.yr.y&&(r.y=F.y),F.zr.z&&(r.z=F.z);D.left=m.x;D.right=r.x;D.top=r.y;D.bottom=m.y;D.updateProjectionMatrix()}D=x.shadowMap;A=x.shadowMatrix;E=x.shadowCamera;E.position.setFromMatrixPosition(x.matrixWorld);t.setFromMatrixPosition(x.target.matrixWorld);E.lookAt(t);E.updateMatrixWorld();E.matrixWorldInverse.getInverse(E.matrixWorld);x.cameraHelper&& -(x.cameraHelper.visible=x.shadowCameraVisible);x.shadowCameraVisible&&x.cameraHelper.update();A.set(.5,0,0,.5,0,.5,0,.5,0,0,.5,.5,0,0,0,1);A.multiply(E.projectionMatrix);A.multiply(E.matrixWorldInverse);q.multiplyMatrices(E.projectionMatrix,E.matrixWorldInverse);p.setFromMatrix(q);a.setRenderTarget(D);a.clear();s.length=0;e(c,c,E);x=0;for(D=s.length;x 0 ) {\nfloat depth = gl_FragCoord.z / gl_FragCoord.w;\nfloat fogFactor = 0.0;\nif ( fogType == 1 ) {\nfogFactor = smoothstep( fogNear, fogFar, depth );\n} else {\nconst float LOG2 = 1.442695;\nfloat fogFactor = exp2( - fogDensity * fogDensity * depth * depth * LOG2 );\nfogFactor = 1.0 - clamp( fogFactor, 0.0, 1.0 );\n}\ngl_FragColor = mix( gl_FragColor, vec4( fogColor, gl_FragColor.w ), fogFactor );\n}\n}"].join("\n")); -w.compileShader(R);w.compileShader(H);w.attachShader(F,R);w.attachShader(F,H);w.linkProgram(F);D=F;v=w.getAttribLocation(D,"position");y=w.getAttribLocation(D,"uv");c=w.getUniformLocation(D,"uvOffset");d=w.getUniformLocation(D,"uvScale");e=w.getUniformLocation(D,"rotation");f=w.getUniformLocation(D,"scale");g=w.getUniformLocation(D,"color");h=w.getUniformLocation(D,"map");k=w.getUniformLocation(D,"opacity");n=w.getUniformLocation(D,"modelViewMatrix");p=w.getUniformLocation(D,"projectionMatrix");q= -w.getUniformLocation(D,"fogType");m=w.getUniformLocation(D,"fogDensity");r=w.getUniformLocation(D,"fogNear");t=w.getUniformLocation(D,"fogFar");s=w.getUniformLocation(D,"fogColor");u=w.getUniformLocation(D,"alphaTest");F=document.createElement("canvas");F.width=8;F.height=8;R=F.getContext("2d");R.fillStyle="white";R.fillRect(0,0,8,8);E=new THREE.Texture(F);E.needsUpdate=!0}w.useProgram(D);w.enableVertexAttribArray(v);w.enableVertexAttribArray(y);w.disable(w.CULL_FACE);w.enable(w.BLEND);w.bindBuffer(w.ARRAY_BUFFER, -K);w.vertexAttribPointer(v,2,w.FLOAT,!1,16,0);w.vertexAttribPointer(y,2,w.FLOAT,!1,16,8);w.bindBuffer(w.ELEMENT_ARRAY_BUFFER,x);w.uniformMatrix4fv(p,!1,B.projectionMatrix.elements);w.activeTexture(w.TEXTURE0);w.uniform1i(h,0);R=F=0;(H=A.fog)?(w.uniform3f(s,H.color.r,H.color.g,H.color.b),H instanceof THREE.Fog?(w.uniform1f(r,H.near),w.uniform1f(t,H.far),w.uniform1i(q,1),R=F=1):H instanceof THREE.FogExp2&&(w.uniform1f(m,H.density),w.uniform1i(q,2),R=F=2)):(w.uniform1i(q,0),R=F=0);for(var 
H=0,C=b.length;H< -C;H++){var T=b[H];T._modelViewMatrix.multiplyMatrices(B.matrixWorldInverse,T.matrixWorld);T.z=null===T.renderDepth?-T._modelViewMatrix.elements[14]:T.renderDepth}b.sort(G);for(var Q=[],H=0,C=b.length;Hq-1?0:q-1,r=q+1>e-1?e-1:q+1,t=0>p-1?0:p-1,s=p+1>d-1?d-1:p+1,u=[],v=[0,0,h[4*(q*d+p)]/255*b];u.push([-1,0,h[4*(q*d+t)]/255*b]);u.push([-1,-1,h[4*(m*d+t)]/255*b]);u.push([0,-1,h[4*(m*d+p)]/255*b]);u.push([1,-1,h[4*(m*d+s)]/255*b]);u.push([1,0,h[4*(q*d+s)]/255*b]);u.push([1,1,h[4*(r*d+s)]/255*b]);u.push([0,1,h[4*(r*d+p)]/255* -b]);u.push([-1,1,h[4*(r*d+t)]/255*b]);m=[];t=u.length;for(r=0;re)return null;var f=[],g=[],h=[],k,n,p;if(0=q--){console.log("Warning, unable to triangulate polygon!");break}k=n;e<=k&&(k=0);n=k+1;e<=n&&(n=0);p=n+1;e<=p&&(p=0);var m;a:{var r=m=void 0,t=void 0,s=void 0,u=void 0,v=void 0,y=void 0,G=void 0,w=void 0, -r=a[g[k]].x,t=a[g[k]].y,s=a[g[n]].x,u=a[g[n]].y,v=a[g[p]].x,y=a[g[p]].y;if(1E-10>(s-r)*(y-t)-(u-t)*(v-r))m=!1;else{var K=void 0,x=void 0,D=void 0,E=void 0,A=void 0,B=void 0,F=void 0,R=void 0,H=void 0,C=void 0,H=R=F=w=G=void 0,K=v-s,x=y-u,D=r-v,E=t-y,A=s-r,B=u-t;for(m=0;mk)g=d+1;else if(0b&&(b=0);1=b)return b=c[a]-b,a=this.curves[a],b=1-b/a.getLength(),a.getPointAt(b);a++}return null};THREE.CurvePath.prototype.getLength=function(){var a=this.getCurveLengths();return a[a.length-1]}; -THREE.CurvePath.prototype.getCurveLengths=function(){if(this.cacheLengths&&this.cacheLengths.length==this.curves.length)return this.cacheLengths;var a=[],b=0,c,d=this.curves.length;for(c=0;cb?b=h.x:h.xc?c=h.y:h.yd?d=h.z:h.zMath.abs(d.x-c[0].x)&&1E-10>Math.abs(d.y-c[0].y)&&c.splice(c.length-1,1);b&&c.push(c[0]);return c}; -THREE.Path.prototype.toShapes=function(a,b){function c(a){for(var b=[],c=0,d=a.length;cm&&(g=b[f],k=-k,h=b[e],m=-m),!(a.yh.y))if(a.y==g.y){if(a.x==g.x)return!0}else{e=m*(a.x-g.x)-k*(a.y-g.y);if(0==e)return!0;0>e||(d=!d)}}else if(a.y==g.y&&(h.x<=a.x&&a.x<=g.x||g.x<=a.x&&a.x<= -h.x))return!0}return d}var e=function(a){var b,c,d,e,f=[],g=new THREE.Path;b=0;for(c=a.length;bE||E>D)return[];k=n*p-k*q;if(0>k||k>D)return[]}else{if(0d?[]:k==d?f?[]:[g]:a<=d?[g,h]: -[g,n]}function e(a,b,c,d){var e=b.x-a.x,f=b.y-a.y;b=c.x-a.x;c=c.y-a.y;var g=d.x-a.x;d=d.y-a.y;a=e*c-f*b;e=e*d-f*g;return 1E-10f&&(f=d);var g=a+1;g>d&&(g=0);d=e(h[a],h[f],h[g],k[b]);if(!d)return!1; -d=k.length-1;f=b-1;0>f&&(f=d);g=b+1;g>d&&(g=0);return(d=e(k[b],k[f],k[g],h[a]))?!0:!1}function f(a,b){var c,e;for(c=0;cC){console.log("Infinite Loop! 
Holes left:"+ -n.length+", Probably Hole outside Shape!");break}for(q=B;qh;h++)n=k[h].x+":"+k[h].y, -n=p[n],void 0!==n&&(k[h]=n);return q.concat()},isClockWise:function(a){return 0>THREE.FontUtils.Triangulate.area(a)},b2p0:function(a,b){var c=1-a;return c*c*b},b2p1:function(a,b){return 2*(1-a)*a*b},b2p2:function(a,b){return a*a*b},b2:function(a,b,c,d){return this.b2p0(a,b)+this.b2p1(a,c)+this.b2p2(a,d)},b3p0:function(a,b){var c=1-a;return c*c*c*b},b3p1:function(a,b){var c=1-a;return 3*c*c*a*b},b3p2:function(a,b){return 3*(1-a)*a*a*b},b3p3:function(a,b){return a*a*a*b},b3:function(a,b,c,d,e){return this.b3p0(a, -b)+this.b3p1(a,c)+this.b3p2(a,d)+this.b3p3(a,e)}};THREE.LineCurve=function(a,b){this.v1=a;this.v2=b};THREE.LineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.LineCurve.prototype.getPoint=function(a){var b=this.v2.clone().sub(this.v1);b.multiplyScalar(a).add(this.v1);return b};THREE.LineCurve.prototype.getPointAt=function(a){return this.getPoint(a)};THREE.LineCurve.prototype.getTangent=function(a){return this.v2.clone().sub(this.v1).normalize()}; -THREE.QuadraticBezierCurve=function(a,b,c){this.v0=a;this.v1=b;this.v2=c};THREE.QuadraticBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.QuadraticBezierCurve.prototype.getPoint=function(a){var b=new THREE.Vector2;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);return b}; -THREE.QuadraticBezierCurve.prototype.getTangent=function(a){var b=new THREE.Vector2;b.x=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.y,this.v1.y,this.v2.y);return b.normalize()};THREE.CubicBezierCurve=function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d};THREE.CubicBezierCurve.prototype=Object.create(THREE.Curve.prototype); -THREE.CubicBezierCurve.prototype.getPoint=function(a){var b;b=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);return new THREE.Vector2(b,a)};THREE.CubicBezierCurve.prototype.getTangent=function(a){var b;b=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b=new THREE.Vector2(b,a);b.normalize();return b}; -THREE.SplineCurve=function(a){this.points=void 0==a?[]:a};THREE.SplineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.SplineCurve.prototype.getPoint=function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector2;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);return c}; -THREE.EllipseCurve=function(a,b,c,d,e,f,g){this.aX=a;this.aY=b;this.xRadius=c;this.yRadius=d;this.aStartAngle=e;this.aEndAngle=f;this.aClockwise=g};THREE.EllipseCurve.prototype=Object.create(THREE.Curve.prototype); -THREE.EllipseCurve.prototype.getPoint=function(a){var b=this.aEndAngle-this.aStartAngle;0>b&&(b+=2*Math.PI);b>2*Math.PI&&(b-=2*Math.PI);a=!0===this.aClockwise?this.aEndAngle+(1-a)*(2*Math.PI-b):this.aStartAngle+a*b;b=new THREE.Vector2;b.x=this.aX+this.xRadius*Math.cos(a);b.y=this.aY+this.yRadius*Math.sin(a);return b};THREE.ArcCurve=function(a,b,c,d,e,f){THREE.EllipseCurve.call(this,a,b,c,c,d,e,f)};THREE.ArcCurve.prototype=Object.create(THREE.EllipseCurve.prototype); 
-THREE.LineCurve3=THREE.Curve.create(function(a,b){this.v1=a;this.v2=b},function(a){var b=new THREE.Vector3;b.subVectors(this.v2,this.v1);b.multiplyScalar(a);b.add(this.v1);return b});THREE.QuadraticBezierCurve3=THREE.Curve.create(function(a,b,c){this.v0=a;this.v1=b;this.v2=c},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);b.z=THREE.Shape.Utils.b2(a,this.v0.z,this.v1.z,this.v2.z);return b}); -THREE.CubicBezierCurve3=THREE.Curve.create(function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);b.y=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b.z=THREE.Shape.Utils.b3(a,this.v0.z,this.v1.z,this.v2.z,this.v3.z);return b}); -THREE.SplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c}); -THREE.ClosedSplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-0;var c=Math.floor(a);a-=c;var c=c+(0a.hierarchy[b].keys[c].time&&(a.hierarchy[b].keys[c].time= -0),void 0!==a.hierarchy[b].keys[c].rot&&!(a.hierarchy[b].keys[c].rot instanceof THREE.Quaternion)){var d=a.hierarchy[b].keys[c].rot;a.hierarchy[b].keys[c].rot=(new THREE.Quaternion).fromArray(d)}if(a.hierarchy[b].keys.length&&void 0!==a.hierarchy[b].keys[0].morphTargets){d={};for(c=0;cd;d++){for(var e=this.keyTypes[d],f=this.data.hierarchy[a].keys[0],g=this.getNextKeyWith(e,a,1);g.timef.index;)f=g,g=this.getNextKeyWith(e,a,g.index+1);c.prevKey[e]=f;c.nextKey[e]=g}}}; -THREE.Animation.prototype.resetBlendWeights=function(){for(var a=0,b=this.hierarchy.length;aa.length-2?q:q+1;c[3]=q>a.length-3?q:q+2;q=a[c[0]];r=a[c[1]];t=a[c[2]];s=a[c[3]];c=e*e;m=e*c;d[0]=f(q[0],r[0],t[0],s[0],e,c,m);d[1]=f(q[1],r[1],t[1],s[1],e,c,m);d[2]=f(q[2],r[2],t[2],s[2],e,c,m);return d},f=function(a,b,c,d,e,f,m){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*m+ -(-3*(b-c)-2*a-d)*f+a*e+b};return function(f){if(!1!==this.isPlaying&&(this.currentTime+=f*this.timeScale,0!==this.weight)){f=this.data.length;if(this.currentTime>f||0>this.currentTime)if(this.loop)this.currentTime%=f,0>this.currentTime&&(this.currentTime+=f),this.reset();else{this.stop();return}f=0;for(var h=this.hierarchy.length;fq;q++){var m=this.keyTypes[q],r=n.prevKey[m],t=n.nextKey[m]; -if(0this.timeScale&&r.time>=this.currentTime){r=this.data.hierarchy[f].keys[0];for(t=this.getNextKeyWith(m,f,1);t.timer.index;)r=t,t=this.getNextKeyWith(m,f,t.index+1);n.prevKey[m]=r;n.nextKey[m]=t}k.matrixAutoUpdate=!0;k.matrixWorldNeedsUpdate=!0;var s=(this.currentTime-r.time)/(t.time-r.time),u=r[m],v=t[m];0>s&&(s=0);1a&&(this.currentTime%=a);this.currentTime=Math.min(this.currentTime,a);a=0;for(var b=this.hierarchy.length;af.index;)f=g,g=e[f.index+1];d.prevKey= 
-f;d.nextKey=g}g.time>=this.currentTime?f.interpolate(g,this.currentTime):f.interpolate(g,g.time);this.data.hierarchy[a].node.updateMatrix();c.matrixWorldNeedsUpdate=!0}}}};THREE.KeyFrameAnimation.prototype.getNextKeyWith=function(a,b,c){b=this.data.hierarchy[b].keys;for(c%=b.length;cthis.duration&&(this.currentTime%=this.duration);this.currentTime=Math.min(this.currentTime,this.duration);c=this.duration/this.frames;var d=Math.floor(this.currentTime/c);d!=b&&(this.mesh.morphTargetInfluences[a]=0,this.mesh.morphTargetInfluences[b]=1,this.mesh.morphTargetInfluences[d]= -0,a=b,b=d);this.mesh.morphTargetInfluences[d]=this.currentTime%c/c;this.mesh.morphTargetInfluences[a]=1-this.mesh.morphTargetInfluences[d]}}}()}; -THREE.BoxGeometry=function(a,b,c,d,e,f){function g(a,b,c,d,e,f,g,s){var u,v=h.widthSegments,y=h.heightSegments,G=e/2,w=f/2,K=h.vertices.length;if("x"===a&&"y"===b||"y"===a&&"x"===b)u="z";else if("x"===a&&"z"===b||"z"===a&&"x"===b)u="y",y=h.depthSegments;else if("z"===a&&"y"===b||"y"===a&&"z"===b)u="x",v=h.depthSegments;var x=v+1,D=y+1,E=e/v,A=f/y,B=new THREE.Vector3;B[u]=0=d)return new THREE.Vector2(c,a);d=Math.sqrt(d/2)}else a=!1,1E-10d?-1E-10>f&&(a=!0):Math.sign(e)== -Math.sign(g)&&(a=!0),a?(c=-e,a=d,d=Math.sqrt(h)):(c=d,a=e,d=Math.sqrt(h/2));return new THREE.Vector2(c/d,a/d)}function e(a,b){var c,d;for(P=a.length;0<=--P;){c=P;d=P-1;0>d&&(d=a.length-1);for(var e=0,f=r+2*p,e=0;eMath.abs(b.y-c.y)?[new THREE.Vector2(b.x,1-b.z),new THREE.Vector2(c.x,1-c.z),new THREE.Vector2(d.x,1-d.z),new THREE.Vector2(e.x,1-e.z)]:[new THREE.Vector2(b.y,1-b.z),new THREE.Vector2(c.y,1-c.z),new THREE.Vector2(d.y, -1-d.z),new THREE.Vector2(e.y,1-e.z)]}};THREE.ShapeGeometry=function(a,b){THREE.Geometry.call(this);this.type="ShapeGeometry";!1===a instanceof Array&&(a=[a]);this.addShapeList(a,b);this.computeFaceNormals()};THREE.ShapeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ShapeGeometry.prototype.addShapeList=function(a,b){for(var c=0,d=a.length;cc&&1===a.x&&(a=new THREE.Vector2(a.x-1,a.y));0===b.x&&0===b.z&&(a=new THREE.Vector2(c/2/Math.PI+.5, -a.y));return a.clone()}THREE.Geometry.call(this);this.type="PolyhedronGeometry";this.parameters={vertices:a,indices:b,radius:c,detail:d};c=c||1;d=d||0;for(var k=this,n=0,p=a.length;nr&&(.2>d&&(b[0].x+=1),.2>a&&(b[1].x+=1),.2>q&&(b[2].x+=1));n=0;for(p=this.vertices.length;nc.y?this.quaternion.set(1,0,0,0):(a.set(c.z,0,-c.x).normalize(),b=Math.acos(c.y),this.quaternion.setFromAxisAngle(a,b))}}(); -THREE.ArrowHelper.prototype.setLength=function(a,b,c){void 0===b&&(b=.2*a);void 0===c&&(c=.2*b);this.line.scale.set(1,a,1);this.line.updateMatrix();this.cone.scale.set(c,b,c);this.cone.position.y=a;this.cone.updateMatrix()};THREE.ArrowHelper.prototype.setColor=function(a){this.line.material.color.set(a);this.cone.material.color.set(a)}; -THREE.BoxHelper=function(a){var b=new THREE.BufferGeometry;b.addAttribute("position",new THREE.BufferAttribute(new Float32Array(72),3));THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:16776960}),THREE.LinePieces);void 0!==a&&this.update(a)};THREE.BoxHelper.prototype=Object.create(THREE.Line.prototype); -THREE.BoxHelper.prototype.update=function(a){var b=a.geometry;null===b.boundingBox&&b.computeBoundingBox();var 
c=b.boundingBox.min,b=b.boundingBox.max,d=this.geometry.attributes.position.array;d[0]=b.x;d[1]=b.y;d[2]=b.z;d[3]=c.x;d[4]=b.y;d[5]=b.z;d[6]=c.x;d[7]=b.y;d[8]=b.z;d[9]=c.x;d[10]=c.y;d[11]=b.z;d[12]=c.x;d[13]=c.y;d[14]=b.z;d[15]=b.x;d[16]=c.y;d[17]=b.z;d[18]=b.x;d[19]=c.y;d[20]=b.z;d[21]=b.x;d[22]=b.y;d[23]=b.z;d[24]=b.x;d[25]=b.y;d[26]=c.z;d[27]=c.x;d[28]=b.y;d[29]=c.z;d[30]=c.x;d[31]=b.y; -d[32]=c.z;d[33]=c.x;d[34]=c.y;d[35]=c.z;d[36]=c.x;d[37]=c.y;d[38]=c.z;d[39]=b.x;d[40]=c.y;d[41]=c.z;d[42]=b.x;d[43]=c.y;d[44]=c.z;d[45]=b.x;d[46]=b.y;d[47]=c.z;d[48]=b.x;d[49]=b.y;d[50]=b.z;d[51]=b.x;d[52]=b.y;d[53]=c.z;d[54]=c.x;d[55]=b.y;d[56]=b.z;d[57]=c.x;d[58]=b.y;d[59]=c.z;d[60]=c.x;d[61]=c.y;d[62]=b.z;d[63]=c.x;d[64]=c.y;d[65]=c.z;d[66]=b.x;d[67]=c.y;d[68]=b.z;d[69]=b.x;d[70]=c.y;d[71]=c.z;this.geometry.attributes.position.needsUpdate=!0;this.geometry.computeBoundingSphere();this.matrix=a.matrixWorld; -this.matrixAutoUpdate=!1};THREE.BoundingBoxHelper=function(a,b){var c=void 0!==b?b:8947848;this.object=a;this.box=new THREE.Box3;THREE.Mesh.call(this,new THREE.BoxGeometry(1,1,1),new THREE.MeshBasicMaterial({color:c,wireframe:!0}))};THREE.BoundingBoxHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.BoundingBoxHelper.prototype.update=function(){this.box.setFromObject(this.object);this.box.size(this.scale);this.box.center(this.position)}; -THREE.CameraHelper=function(a){function b(a,b,d){c(a,d);c(b,d)}function c(a,b){d.vertices.push(new THREE.Vector3);d.colors.push(new THREE.Color(b));void 0===f[a]&&(f[a]=[]);f[a].push(d.vertices.length-1)}var d=new THREE.Geometry,e=new THREE.LineBasicMaterial({color:16777215,vertexColors:THREE.FaceColors}),f={};b("n1","n2",16755200);b("n2","n4",16755200);b("n4","n3",16755200);b("n3","n1",16755200);b("f1","f2",16755200);b("f2","f4",16755200);b("f4","f3",16755200);b("f3","f1",16755200);b("n1","f1",16755200); -b("n2","f2",16755200);b("n3","f3",16755200);b("n4","f4",16755200);b("p","n1",16711680);b("p","n2",16711680);b("p","n3",16711680);b("p","n4",16711680);b("u1","u2",43775);b("u2","u3",43775);b("u3","u1",43775);b("c","t",16777215);b("p","c",3355443);b("cn1","cn2",3355443);b("cn3","cn4",3355443);b("cf1","cf2",3355443);b("cf3","cf4",3355443);THREE.Line.call(this,d,e,THREE.LinePieces);this.camera=a;this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.pointMap=f;this.update()}; -THREE.CameraHelper.prototype=Object.create(THREE.Line.prototype); -THREE.CameraHelper.prototype.update=function(){var a,b,c=new THREE.Vector3,d=new THREE.Camera,e=function(e,g,h,k){c.set(g,h,k).unproject(d);e=b[e];if(void 0!==e)for(g=0,h=e.length;gt;t++){d[0]=r[g[t]];d[1]=r[g[(t+1)%3]];d.sort(f);var s=d.toString();void 0===e[s]?(e[s]={vert1:d[0],vert2:d[1],face1:q,face2:void 0},p++):e[s].face2=q}d=new Float32Array(6*p);f=0;for(s in e)if(g=e[s],void 0===g.face2|| -.9999>k[g.face1].normal.dot(k[g.face2].normal))p=n[g.vert1],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z,p=n[g.vert2],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3));THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.EdgesHelper.prototype=Object.create(THREE.Line.prototype); -THREE.FaceNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;a=void 0!==c?c:16776960;d=void 0!==d?d:1;b=new THREE.Geometry;c=0;for(var e=this.object.geometry.faces.length;cb;b++)a.faces[b].color=this.colors[4>b?0:1];b=new THREE.MeshBasicMaterial({vertexColors:THREE.FaceColors,wireframe:!0});this.lightSphere=new 
THREE.Mesh(a,b);this.add(this.lightSphere); -this.update()};THREE.HemisphereLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.HemisphereLightHelper.prototype.dispose=function(){this.lightSphere.geometry.dispose();this.lightSphere.material.dispose()}; -THREE.HemisphereLightHelper.prototype.update=function(){var a=new THREE.Vector3;return function(){this.colors[0].copy(this.light.color).multiplyScalar(this.light.intensity);this.colors[1].copy(this.light.groundColor).multiplyScalar(this.light.intensity);this.lightSphere.lookAt(a.setFromMatrixPosition(this.light.matrixWorld).negate());this.lightSphere.geometry.colorsNeedUpdate=!0}}(); -THREE.PointLightHelper=function(a,b){this.light=a;this.light.updateMatrixWorld();var c=new THREE.SphereGeometry(b,4,2),d=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);THREE.Mesh.call(this,c,d);this.matrix=this.light.matrixWorld;this.matrixAutoUpdate=!1};THREE.PointLightHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.PointLightHelper.prototype.dispose=function(){this.geometry.dispose();this.material.dispose()}; -THREE.PointLightHelper.prototype.update=function(){this.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)}; -THREE.SkeletonHelper=function(a){this.bones=this.getBoneList(a);for(var b=new THREE.Geometry,c=0;cs;s++){d[0]=t[g[s]];d[1]=t[g[(s+1)%3]];d.sort(f);var u=d.toString();void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++)}d=new Float32Array(6*p);m=0;for(r=p;ms;s++)p= -k[q[2*m+s]],g=6*m+3*s,d[g+0]=p.x,d[g+1]=p.y,d[g+2]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3))}else if(a.geometry instanceof THREE.BufferGeometry){if(void 0!==a.geometry.attributes.index){k=a.geometry.attributes.position.array;r=a.geometry.attributes.index.array;n=a.geometry.drawcalls;p=0;0===n.length&&(n=[{count:r.length,index:0,start:0}]);for(var q=new Uint32Array(2*r.length),t=0,v=n.length;ts;s++)d[0]= -g+r[m+s],d[1]=g+r[m+(s+1)%3],d.sort(f),u=d.toString(),void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++);d=new Float32Array(6*p);m=0;for(r=p;ms;s++)g=6*m+3*s,p=3*q[2*m+s],d[g+0]=k[p],d[g+1]=k[p+1],d[g+2]=k[p+2]}else for(k=a.geometry.attributes.position.array,p=k.length/3,q=p/3,d=new Float32Array(6*p),m=0,r=q;ms;s++)g=18*m+6*s,q=9*m+3*s,d[g+0]=k[q],d[g+1]=k[q+1],d[g+2]=k[q+2],p=9*m+(s+1)%3*3,d[g+3]=k[p],d[g+4]=k[p+1],d[g+5]=k[p+2];h.addAttribute("position",new THREE.BufferAttribute(d, -3))}THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.WireframeHelper.prototype=Object.create(THREE.Line.prototype);THREE.ImmediateRenderObject=function(){THREE.Object3D.call(this);this.render=function(a){}};THREE.ImmediateRenderObject.prototype=Object.create(THREE.Object3D.prototype); -THREE.MorphBlendMesh=function(a,b){THREE.Mesh.call(this,a,b);this.animationsMap={};this.animationsList=[];var c=this.geometry.morphTargets.length;this.createAnimation("__default",0,c-1,c/1);this.setAnimationWeight("__default",1)};THREE.MorphBlendMesh.prototype=Object.create(THREE.Mesh.prototype); -THREE.MorphBlendMesh.prototype.createAnimation=function(a,b,c,d){b={startFrame:b,endFrame:c,length:c-b+1,fps:d,duration:(c-b)/d,lastFrame:0,currentFrame:0,active:!1,time:0,direction:1,weight:1,directionBackwards:!1,mirroredLoop:!1};this.animationsMap[a]=b;this.animationsList.push(b)}; -THREE.MorphBlendMesh.prototype.autoCreateAnimations=function(a){for(var 
b=/([a-z]+)_?(\d+)/,c,d={},e=this.geometry,f=0,g=e.morphTargets.length;fh.end&&(h.end=f);c||(c=k)}}for(k in d)h=d[k],this.createAnimation(k,h.start,h.end,a);this.firstAnimation=c}; -THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=this.animationsMap[a])a.direction=1,a.directionBackwards=!1};THREE.MorphBlendMesh.prototype.setAnimationDirectionBackward=function(a){if(a=this.animationsMap[a])a.direction=-1,a.directionBackwards=!0};THREE.MorphBlendMesh.prototype.setAnimationFPS=function(a,b){var c=this.animationsMap[a];c&&(c.fps=b,c.duration=(c.end-c.start)/c.fps)}; -THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b}; -THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):console.warn("animation["+a+"] undefined")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1}; -THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;bd.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g=d.weight; -f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; diff --git a/plugins/Sidebar/media-globe/world.jpg b/plugins/Sidebar/media-globe/world.jpg deleted file mode 100644 index 222bd939..00000000 Binary files a/plugins/Sidebar/media-globe/world.jpg and /dev/null differ diff --git a/plugins/Sidebar/media/Class.coffee b/plugins/Sidebar/media/Class.coffee deleted file mode 100644 index d62ab25c..00000000 --- a/plugins/Sidebar/media/Class.coffee +++ /dev/null @@ -1,23 +0,0 @@ -class Class - trace: true - - log: (args...) -> - return unless @trace - return if typeof console is 'undefined' - args.unshift("[#{@.constructor.name}]") - console.log(args...) - @ - - logStart: (name, args...) -> - return unless @trace - @logtimers or= {} - @logtimers[name] = +(new Date) - @log "#{name}", args..., "(started)" if args.length > 0 - @ - - logEnd: (name, args...) 
-> - ms = +(new Date)-@logtimers[name] - @log "#{name}", args..., "(Done in #{ms}ms)" - @ - -window.Class = Class \ No newline at end of file diff --git a/plugins/Sidebar/media/Internals.coffee b/plugins/Sidebar/media/Internals.coffee deleted file mode 100644 index 484ecdb7..00000000 --- a/plugins/Sidebar/media/Internals.coffee +++ /dev/null @@ -1,60 +0,0 @@ -class Internals extends Class - constructor: (@sidebar) -> - @tag = null - @opened = false - if window.top.location.hash == "#internals" - setTimeout (=> @open()), 10 - - createHtmltag: -> - @when_loaded = $.Deferred() - if not @container - @container = $(""" -
    -
    - - """) - @container.appendTo(document.body) - @tag = @container.find(".internals") - - open: => - @createHtmltag() - @sidebar.fixbutton_targety = @sidebar.page_height - @stopDragY() - - onOpened: => - @sidebar.onClosed() - @log "onOpened" - - onClosed: => - $(document.body).removeClass("body-internals") - - stopDragY: => - # Animate sidebar and iframe - if @sidebar.fixbutton_targety == @sidebar.fixbutton_inity - # Closed - targety = 0 - @opened = false - else - # Opened - targety = @sidebar.fixbutton_targety - @sidebar.fixbutton_inity - @onOpened() - @opened = true - - # Revent sidebar transitions - if @tag - @tag.css("transition", "0.5s ease-out") - @tag.css("transform", "translateY(#{targety}px)").one transitionEnd, => - @tag.css("transition", "") - if not @opened - @log "cleanup" - # Revert body transformations - @log "stopdrag", "opened:", @opened, targety - if not @opened - @onClosed() - -window.Internals = Internals \ No newline at end of file diff --git a/plugins/Sidebar/media/Internals.css b/plugins/Sidebar/media/Internals.css deleted file mode 100644 index 36b2489e..00000000 --- a/plugins/Sidebar/media/Internals.css +++ /dev/null @@ -1,17 +0,0 @@ -.internals-container { width: 100%; z-index: 998; position: absolute; top: -100vh; } -.internals { background-color: #EEE; height: 100vh; transform: translateY(0px); } -.internals-middle {height: 0px; top: 50%; position: absolute; width: 100%; left: 50%; } - -.internals .mynode { - border: 0.5px solid #aaa; width: 50px; height: 50px; transform: rotateZ(45deg); margin-top: -25px; margin-left: -25px; - opacity: 1; display: inline-block; background-color: #EEE; z-index: 9; position: absolute; outline: 5px solid #EEE; -} -.internals .peers { width: 0px; height: 0px; position: absolute; left: -20px; top: -20px; text-align: center; } -.internals .peer { left: 0px; top: 0px; position: absolute; } -.internals .peer .icon { width: 20px; height: 20px; padding: 10px; display: inline-block; text-decoration: none; left: 200px; position: absolute; color: #666; } -.internals .peer .icon:before { content: "\25BC"; position: absolute; margin-top: 3px; margin-left: -1px; opacity: 0; transition: all 0.3s } -.internals .peer .icon:hover:before { opacity: 1; transition: none } -.internals .peer .line { - width: 187px; border-top: 1px solid #CCC; position: absolute; top: 20px; left: 20px; - transform: rotateZ(334deg); transform-origin: bottom left; -} \ No newline at end of file diff --git a/plugins/Sidebar/media/Menu.coffee b/plugins/Sidebar/media/Menu.coffee deleted file mode 100644 index 3e19fd9f..00000000 --- a/plugins/Sidebar/media/Menu.coffee +++ /dev/null @@ -1,49 +0,0 @@ -class Menu - constructor: (@button) -> - @elem = $(".menu.template").clone().removeClass("template") - @elem.appendTo("body") - @items = [] - - show: -> - if window.visible_menu and window.visible_menu.button[0] == @button[0] # Same menu visible then hide it - window.visible_menu.hide() - @hide() - else - button_pos = @button.offset() - left = button_pos.left - @elem.css({"top": button_pos.top+@button.outerHeight(), "left": left}) - @button.addClass("menu-active") - @elem.addClass("visible") - if @elem.position().left + @elem.width() + 20 > window.innerWidth - @elem.css("left", window.innerWidth - @elem.width() - 20) - if window.visible_menu then window.visible_menu.hide() - window.visible_menu = @ - - - hide: -> - @elem.removeClass("visible") - @button.removeClass("menu-active") - window.visible_menu = null - - - addItem: (title, cb) -> - item = $(".menu-item.template", 
@elem).clone().removeClass("template") - item.html(title) - item.on "click", => - if not cb(item) - @hide() - return false - item.appendTo(@elem) - @items.push item - return item - - - log: (args...) -> - console.log "[Menu]", args... - -window.Menu = Menu - -# Hide menu on outside click -$("body").on "click", (e) -> - if window.visible_menu and e.target != window.visible_menu.button[0] and $(e.target).parent()[0] != window.visible_menu.elem[0] - window.visible_menu.hide() diff --git a/plugins/Sidebar/media/Menu.css b/plugins/Sidebar/media/Menu.css deleted file mode 100644 index e2afa16e..00000000 --- a/plugins/Sidebar/media/Menu.css +++ /dev/null @@ -1,19 +0,0 @@ -.menu { - background-color: white; padding: 10px 0px; position: absolute; top: 0px; left: 0px; max-height: 0px; overflow: hidden; transform: translate(0px, -30px); pointer-events: none; - box-shadow: 0px 2px 8px rgba(0,0,0,0.3); border-radius: 2px; opacity: 0; transition: opacity 0.2s ease-out, transform 1s ease-out, max-height 0.2s ease-in-out; -} -.menu.visible { opacity: 1; max-height: 350px; transform: translate(0px, 0px); transition: opacity 0.1s ease-out, transform 0.3s ease-out, max-height 0.3s ease-in-out; pointer-events: all } - -.menu-item { display: block; text-decoration: none; color: black; padding: 6px 24px; transition: all 0.2s; border-bottom: none; font-weight: normal; padding-left: 30px; } -.menu-item-separator { margin-top: 5px; border-top: 1px solid #eee } - -.menu-item:hover { background-color: #F6F6F6; transition: none; color: inherit; border: none } -.menu-item:active, .menu-item:focus { background-color: #AF3BFF; color: white; transition: none } -.menu-item.selected:before { - content: "L"; display: inline-block; transform: rotateZ(45deg) scaleX(-1); - font-weight: bold; position: absolute; margin-left: -17px; font-size: 12px; margin-top: 2px; -} - -@media only screen and (max-width: 800px) { -.menu, .menu.visible { position: absolute; left: unset !important; right: 20px; } -} \ No newline at end of file diff --git a/plugins/Sidebar/media/RateLimit.coffee b/plugins/Sidebar/media/RateLimit.coffee deleted file mode 100644 index 17c67433..00000000 --- a/plugins/Sidebar/media/RateLimit.coffee +++ /dev/null @@ -1,14 +0,0 @@ -limits = {} -call_after_interval = {} -window.RateLimit = (interval, fn) -> - if not limits[fn] - call_after_interval[fn] = false - fn() # First call is not delayed - limits[fn] = setTimeout (-> - if call_after_interval[fn] - fn() - delete limits[fn] - delete call_after_interval[fn] - ), interval - else # Called within iterval, delay the call - call_after_interval[fn] = true diff --git a/plugins/Sidebar/media/Scrollable.js b/plugins/Sidebar/media/Scrollable.js deleted file mode 100644 index 689a5719..00000000 --- a/plugins/Sidebar/media/Scrollable.js +++ /dev/null @@ -1,91 +0,0 @@ -/* via http://jsfiddle.net/elGrecode/00dgurnn/ */ - -window.initScrollable = function () { - - var scrollContainer = document.querySelector('.scrollable'), - scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'), - scrollContent = document.querySelector('.scrollable .content'), - contentPosition = 0, - scrollerBeingDragged = false, - scroller, - topPosition, - scrollerHeight; - - function calculateScrollerHeight() { - // *Calculation of how tall scroller should be - var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight; - if (visibleRatio == 1) - scroller.style.display = "none"; - else - scroller.style.display = "block"; - return visibleRatio * 
scrollContainer.offsetHeight; - } - - function moveScroller(evt) { - // Move Scroll bar to top offset - var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight; - topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box - scroller.style.top = topPosition + 'px'; - } - - function startDrag(evt) { - normalizedPosition = evt.pageY; - contentPosition = scrollContentWrapper.scrollTop; - scrollerBeingDragged = true; - window.addEventListener('mousemove', scrollBarScroll); - return false; - } - - function stopDrag(evt) { - scrollerBeingDragged = false; - window.removeEventListener('mousemove', scrollBarScroll); - } - - function scrollBarScroll(evt) { - if (scrollerBeingDragged === true) { - evt.preventDefault(); - var mouseDifferential = evt.pageY - normalizedPosition; - var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight); - scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent; - } - } - - function updateHeight() { - scrollerHeight = calculateScrollerHeight() - 10; - scroller.style.height = scrollerHeight + 'px'; - } - - function createScroller() { - // *Creates scroller element and appends to '.scrollable' div - // create scroller element - scroller = document.createElement("div"); - scroller.className = 'scroller'; - - // determine how big scroller should be based on content - scrollerHeight = calculateScrollerHeight() - 10; - - if (scrollerHeight / scrollContainer.offsetHeight < 1) { - // *If there is a need to have scroll bar based on content size - scroller.style.height = scrollerHeight + 'px'; - - // append scroller to scrollContainer div - scrollContainer.appendChild(scroller); - - // show scroll path divot - scrollContainer.className += ' showScroll'; - - // attach related draggable listeners - scroller.addEventListener('mousedown', startDrag); - window.addEventListener('mouseup', stopDrag); - } - - } - - createScroller(); - - - // *** Listeners *** - scrollContentWrapper.addEventListener('scroll', moveScroller); - - return updateHeight; -}; \ No newline at end of file diff --git a/plugins/Sidebar/media/Scrollbable.css b/plugins/Sidebar/media/Scrollbable.css deleted file mode 100644 index 6e3e0b6a..00000000 --- a/plugins/Sidebar/media/Scrollbable.css +++ /dev/null @@ -1,44 +0,0 @@ -.scrollable { - overflow: hidden; -} - -.scrollable.showScroll::after { - position: absolute; - content: ''; - top: 5%; - right: 7px; - height: 90%; - width: 3px; - background: rgba(224, 224, 255, .3); -} - -.scrollable .content-wrapper { - width: 100%; - height: 100%; - padding-right: 50%; - overflow-y: scroll; -} -.scroller { - margin-top: 5px; - z-index: 5; - cursor: pointer; - position: absolute; - width: 7px; - border-radius: 5px; - background: #3A3A3A; - top: 0px; - left: 395px; - -webkit-transition: top .08s; - -moz-transition: top .08s; - -ms-transition: top .08s; - -o-transition: top .08s; - transition: top .08s; -} -.scroller { - -webkit-touch-callout: none; - -webkit-user-select: none; - -khtml-user-select: none; - -moz-user-select: none; - -ms-user-select: none; - user-select: none; -} diff --git a/plugins/Sidebar/media/Sidebar.coffee b/plugins/Sidebar/media/Sidebar.coffee deleted file mode 100644 index 938fe41b..00000000 --- a/plugins/Sidebar/media/Sidebar.coffee +++ /dev/null @@ -1,617 +0,0 @@ -class Sidebar extends Class - constructor: (@wrapper) -> - @tag = null - @container = null - @opened = 
false - @width = 410 - @internals = new Internals(@) - @fixbutton = $(".fixbutton") - @fixbutton_addx = 0 - @fixbutton_addy = 0 - @fixbutton_initx = 0 - @fixbutton_inity = 15 - @fixbutton_targetx = 0 - @move_lock = null - @page_width = $(window).width() - @page_height = $(window).height() - @frame = $("#inner-iframe") - @initFixbutton() - @dragStarted = 0 - @globe = null - @preload_html = null - - @original_set_site_info = @wrapper.setSiteInfo # We going to override this, save the original - - # Start in opened state for debugging - if false - @startDrag() - @moved() - @fixbutton_targetx = @fixbutton_initx - @width - @stopDrag() - - - initFixbutton: -> - - # Detect dragging - @fixbutton.on "mousedown touchstart", (e) => - if e.button > 0 # Right or middle click - return - e.preventDefault() - - # Disable previous listeners - @fixbutton.off "click touchend touchcancel" - @fixbutton.off "mousemove touchmove" - - # Make sure its not a click - @dragStarted = (+ new Date) - @fixbutton.one "mousemove touchmove", (e) => - mousex = e.pageX - mousey = e.pageY - if not mousex - mousex = e.originalEvent.touches[0].pageX - mousey = e.originalEvent.touches[0].pageY - - @fixbutton_addx = @fixbutton.offset().left - mousex - @fixbutton_addy = @fixbutton.offset().top - mousey - @startDrag() - @fixbutton.parent().on "click touchend touchcancel", (e) => - if (+ new Date) - @dragStarted < 100 - window.top.location = @fixbutton.find(".fixbutton-bg").attr("href") - @stopDrag() - @resized() - $(window).on "resize", @resized - - resized: => - @page_width = $(window).width() - @page_height = $(window).height() - @fixbutton_initx = @page_width - 75 # Initial x position - if @opened - @fixbutton.css - left: @fixbutton_initx - @width - else - @fixbutton.css - left: @fixbutton_initx - - # Start dragging the fixbutton - startDrag: -> - @move_lock = "x" # Temporary until internals not finished - @log "startDrag" - @fixbutton_targetx = @fixbutton_initx # Fallback x position - - @fixbutton.addClass("dragging") - - # Fullscreen drag bg to capture mouse events over iframe - $("
    ").appendTo(document.body) - - # IE position wrap fix - if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0 - @fixbutton.css("pointer-events", "none") - - # Don't go to homepage - @fixbutton.one "click", (e) => - @stopDrag() - @fixbutton.removeClass("dragging") - moved_x = Math.abs(@fixbutton.offset().left - @fixbutton_initx) - moved_y = Math.abs(@fixbutton.offset().top - @fixbutton_inity) - if moved_x > 5 or moved_y > 10 - # If moved more than some pixel the button then don't go to homepage - e.preventDefault() - - # Animate drag - @fixbutton.parents().on "mousemove touchmove", @animDrag - @fixbutton.parents().on "mousemove touchmove" ,@waitMove - - # Stop dragging listener - @fixbutton.parents().one "mouseup touchend touchcancel", (e) => - e.preventDefault() - @stopDrag() - - - # Wait for moving the fixbutton - waitMove: (e) => - document.body.style.perspective = "1000px" - document.body.style.height = "100%" - document.body.style.willChange = "perspective" - document.documentElement.style.height = "100%" - #$(document.body).css("backface-visibility", "hidden").css("perspective", "1000px").css("height", "900px") - # $("iframe").css("backface-visibility", "hidden") - - moved_x = Math.abs(parseInt(@fixbutton[0].style.left) - @fixbutton_targetx) - moved_y = Math.abs(parseInt(@fixbutton[0].style.top) - @fixbutton_targety) - if moved_x > 5 and (+ new Date) - @dragStarted + moved_x > 50 - @moved("x") - @fixbutton.stop().animate {"top": @fixbutton_inity}, 1000 - @fixbutton.parents().off "mousemove touchmove" ,@waitMove - - else if moved_y > 5 and (+ new Date) - @dragStarted + moved_y > 50 - @moved("y") - @fixbutton.parents().off "mousemove touchmove" ,@waitMove - - moved: (direction) -> - @log "Moved", direction - @move_lock = direction - if direction == "y" - $(document.body).addClass("body-internals") - return @internals.createHtmltag() - @createHtmltag() - $(document.body).addClass("body-sidebar") - @container.on "mousedown touchend touchcancel", (e) => - if e.target != e.currentTarget - return true - @log "closing" - if $(document.body).hasClass("body-sidebar") - @close() - return true - - $(window).off "resize" - $(window).on "resize", => - $(document.body).css "height", $(window).height() - @scrollable() - @resized() - - # Override setsiteinfo to catch changes - @wrapper.setSiteInfo = (site_info) => - @setSiteInfo(site_info) - @original_set_site_info.apply(@wrapper, arguments) - - # Preload world.jpg - img = new Image(); - img.src = "/uimedia/globe/world.jpg"; - - setSiteInfo: (site_info) -> - RateLimit 1500, => - @updateHtmlTag() - RateLimit 30000, => - @displayGlobe() - - # Create the sidebar html tag - createHtmltag: -> - @when_loaded = $.Deferred() - if not @container - @container = $(""" - - """) - @container.appendTo(document.body) - @tag = @container.find(".sidebar") - @updateHtmlTag() - @scrollable = window.initScrollable() - - - updateHtmlTag: -> - if @preload_html - @setHtmlTag(@preload_html) - @preload_html = null - else - @wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag - - setHtmlTag: (res) => - if @tag.find(".content").children().length == 0 # First update - @log "Creating content" - @container.addClass("loaded") - morphdom(@tag.find(".content")[0], '
    '+res+'
    ') - # @scrollable() - @when_loaded.resolve() - - else # Not first update, patch the html to keep unchanged dom elements - morphdom @tag.find(".content")[0], '
    '+res+'
    ', { - onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state - if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0 - return false - else - return true - } - - # Save and forgot privatekey for site signing - @tag.find("#privatekey-add").off("click, touchend").on "click touchend", (e) => - @wrapper.displayPrompt "Enter your private key:", "password", "Save", "", (privatekey) => - @wrapper.ws.cmd "userSetSitePrivatekey", [privatekey], (res) => - @wrapper.notifications.add "privatekey", "done", "Private key saved for site signing", 5000 - return false - - @tag.find("#privatekey-forgot").off("click, touchend").on "click touchend", (e) => - @wrapper.displayConfirm "Remove saved private key for this site?", "Forgot", (res) => - if not res - return false - @wrapper.ws.cmd "userSetSitePrivatekey", [""], (res) => - @wrapper.notifications.add "privatekey", "done", "Saved private key removed", 5000 - return false - - - - animDrag: (e) => - mousex = e.pageX - mousey = e.pageY - if not mousex and e.originalEvent.touches - mousex = e.originalEvent.touches[0].pageX - mousey = e.originalEvent.touches[0].pageY - - overdrag = @fixbutton_initx - @width - mousex - if overdrag > 0 # Overdragged - overdrag_percent = 1 + overdrag/300 - mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent) - targetx = @fixbutton_initx - mousex - @fixbutton_addx - targety = @fixbutton_inity - mousey - @fixbutton_addy - - if @move_lock == "x" - targety = @fixbutton_inity - else if @move_lock == "y" - targetx = @fixbutton_initx - - if not @move_lock or @move_lock == "x" - @fixbutton[0].style.left = (mousex + @fixbutton_addx) + "px" - if @tag - @tag[0].style.transform = "translateX(#{0 - targetx}px)" - - if not @move_lock or @move_lock == "y" - @fixbutton[0].style.top = (mousey + @fixbutton_addy) + "px" - if @internals.tag - @internals.tag[0].style.transform = "translateY(#{0 - targety}px)" - - #if @move_lock == "x" - # @fixbutton[0].style.left = "#{@fixbutton_targetx} px" - #@fixbutton[0].style.top = "#{@fixbutton_inity}px" - #if @move_lock == "y" - # @fixbutton[0].style.top = "#{@fixbutton_targety} px" - - # Check if opened - if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9) - @fixbutton_targetx = @fixbutton_initx - @width # Make it opened - else - @fixbutton_targetx = @fixbutton_initx - - if (not @internals.opened and 0 - targety > @page_height/10) or (@internals.opened and 0 - targety > @page_height*0.95) - @fixbutton_targety = @page_height - @fixbutton_inity - 50 - else - @fixbutton_targety = @fixbutton_inity - - - # Stop dragging the fixbutton - stopDrag: -> - @fixbutton.parents().off "mousemove touchmove" - @fixbutton.off "mousemove touchmove" - @fixbutton.css("pointer-events", "") - $(".drag-bg").remove() - if not @fixbutton.hasClass("dragging") - return - @fixbutton.removeClass("dragging") - - # Move back to initial position - if @fixbutton_targetx != @fixbutton.offset().left - # Animate fixbutton - if @move_lock == "y" - top = @fixbutton_targety - left = @fixbutton_initx - if @move_lock == "x" - top = @fixbutton_inity - left = @fixbutton_targetx - @fixbutton.stop().animate {"left": left, "top": top}, 500, "easeOutBack", => - # Switch back to auto align - if @fixbutton_targetx == @fixbutton_initx # Closed - @fixbutton.css("left", "auto") - else # Opened - @fixbutton.css("left", left) - - $(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status - - @stopDragX() - @internals.stopDragY() - @move_lock = null - 
- stopDragX: -> - # Animate sidebar and iframe - if @fixbutton_targetx == @fixbutton_initx or @move_lock == "y" - # Closed - targetx = 0 - @opened = false - else - # Opened - targetx = @width - if @opened - @onOpened() - else - @when_loaded.done => - @onOpened() - @opened = true - - # Revent sidebar transitions - if @tag - @tag.css("transition", "0.4s ease-out") - @tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, => - @tag.css("transition", "") - if not @opened - @container.remove() - @container = null - if @tag - @tag.remove() - @tag = null - - # Revert body transformations - @log "stopdrag", "opened:", @opened - if not @opened - @onClosed() - - - onOpened: -> - @log "Opened" - @scrollable() - - # Re-calculate height when site admin opened or closed - @tag.find("#checkbox-owned, #checkbox-autodownloadoptional").off("click touchend").on "click touchend", => - setTimeout (=> - @scrollable() - ), 300 - - # Site limit button - @tag.find("#button-sitelimit").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), (res) => - if res == "ok" - @wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000 - @updateHtmlTag() - return false - - # Site autodownload limit button - @tag.find("#button-autodownload_bigfile_size_limit").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "siteSetAutodownloadBigfileLimit", $("#input-autodownload_bigfile_size_limit").val(), (res) => - if res == "ok" - @wrapper.notifications.add "done-bigfilelimit", "done", "Site bigfile auto download limit modified!", 5000 - @updateHtmlTag() - return false - - # Database reload - @tag.find("#button-dbreload").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "dbReload", [], => - @wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000 - @updateHtmlTag() - return false - - # Database rebuild - @tag.find("#button-dbrebuild").off("click touchend").on "click touchend", => - @wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...." 
- @wrapper.ws.cmd "dbRebuild", [], => - @wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000 - @updateHtmlTag() - return false - - # Update site - @tag.find("#button-update").off("click touchend").on "click touchend", => - @tag.find("#button-update").addClass("loading") - @wrapper.ws.cmd "siteUpdate", @wrapper.site_info.address, => - @wrapper.notifications.add "done-updated", "done", "Site updated!", 5000 - @tag.find("#button-update").removeClass("loading") - return false - - # Pause site - @tag.find("#button-pause").off("click touchend").on "click touchend", => - @tag.find("#button-pause").addClass("hidden") - @wrapper.ws.cmd "sitePause", @wrapper.site_info.address - return false - - # Resume site - @tag.find("#button-resume").off("click touchend").on "click touchend", => - @tag.find("#button-resume").addClass("hidden") - @wrapper.ws.cmd "siteResume", @wrapper.site_info.address - return false - - # Delete site - @tag.find("#button-delete").off("click touchend").on "click touchend", => - @wrapper.displayConfirm "Are you sure?", ["Delete this site", "Blacklist"], (confirmed) => - if confirmed == 1 - @tag.find("#button-delete").addClass("loading") - @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, -> - document.location = $(".fixbutton-bg").attr("href") - else if confirmed == 2 - @wrapper.displayPrompt "Blacklist this site", "text", "Delete and Blacklist", "Reason", (reason) => - @tag.find("#button-delete").addClass("loading") - @wrapper.ws.cmd "siteblockAdd", [@wrapper.site_info.address, reason] - @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, -> - document.location = $(".fixbutton-bg").attr("href") - - - return false - - # Owned checkbox - @tag.find("#checkbox-owned").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] - - # Owned checkbox - @tag.find("#checkbox-autodownloadoptional").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")] - - # Change identity button - @tag.find("#button-identity").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "certSelect" - return false - - # Save settings - @tag.find("#button-settings").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "fileGet", "content.json", (res) => - data = JSON.parse(res) - data["title"] = $("#settings-title").val() - data["description"] = $("#settings-description").val() - json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t'))) - @wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) => - if res != "ok" # fileWrite failed - @wrapper.notifications.add "file-write", "error", "File write error: #{res}" - else - @wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000 - if @wrapper.site_info.privatekey - @wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: "content.json", update_changed_files: true} - @updateHtmlTag() - return false - - - # Open site directory - @tag.find("#link-directory").off("click touchend").on "click touchend", => - @wrapper.ws.cmd "serverShowdirectory", ["site", @wrapper.site_info.address] - return false - - # Copy site with peers - @tag.find("#link-copypeers").off("click touchend").on "click touchend", (e) => - copy_text = e.currentTarget.href - handler = (e) => - e.clipboardData.setData('text/plain', copy_text) - e.preventDefault() - @wrapper.notifications.add "copy", "done", "Site 
address with peers copied to your clipboard", 5000 - document.removeEventListener('copy', handler, true) - - document.addEventListener('copy', handler, true) - document.execCommand('copy') - return false - - # Sign and publish content.json - $(document).on "click touchend", => - @tag?.find("#button-sign-publish-menu").removeClass("visible") - @tag?.find(".contents + .flex").removeClass("sign-publish-flex") - - @tag.find(".contents-content").off("click touchend").on "click touchend", (e) => - $("#input-contents").val(e.currentTarget.innerText); - return false; - - menu = new Menu(@tag.find("#menu-sign-publish")) - menu.elem.css("margin-top", "-130px") # Open upwards - menu.addItem "Sign", => - inner_path = @tag.find("#input-contents").val() - - @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (res) => - if @wrapper.site_info.privatekey - # Privatekey stored in users.json - @wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: inner_path, update_changed_files: true}, (res) => - if res == "ok" - @wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 - else if @wrapper.site_info.auth_address in res.signers - # ZeroID or other ID provider - @wrapper.ws.cmd "siteSign", {privatekey: null, inner_path: inner_path, update_changed_files: true}, (res) => - if res == "ok" - @wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 - else - # Ask the user for privatekey - @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key - @wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) => - if res == "ok" - @wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 - - @tag.find(".contents + .flex").removeClass "active" - menu.hide() - - menu.addItem "Publish", => - inner_path = @tag.find("#input-contents").val() - @wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false} - - @tag.find(".contents + .flex").removeClass "active" - menu.hide() - - @tag.find("#menu-sign-publish").off("click touchend").on "click touchend", => - if window.visible_menu == menu - @tag.find(".contents + .flex").removeClass "active" - menu.hide() - else - @tag.find(".contents + .flex").addClass "active" - @tag.find(".content-wrapper").prop "scrollTop", 10000 - menu.show() - return false - - $("body").on "click", => - if @tag - @tag.find(".contents + .flex").removeClass "active" - - @tag.find("#button-sign-publish").off("click touchend").on "click touchend", => - inner_path = @tag.find("#input-contents").val() - - @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (res) => - if @wrapper.site_info.privatekey - # Privatekey stored in users.json - @wrapper.ws.cmd "sitePublish", {privatekey: "stored", inner_path: inner_path, sign: true, update_changed_files: true}, (res) => - if res == "ok" - @wrapper.notifications.add "sign", "done", "#{inner_path} Signed and published!", 5000 - else if @wrapper.site_info.auth_address in res.signers - # ZeroID or other ID provider - @wrapper.ws.cmd "sitePublish", {privatekey: null, inner_path: inner_path, sign: true, update_changed_files: true}, (res) => - if res == "ok" - @wrapper.notifications.add "sign", "done", "#{inner_path} Signed and published!", 5000 - else - # Ask the user for privatekey - @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key - @wrapper.ws.cmd "sitePublish", {privatekey: privatekey, inner_path: inner_path, sign: true, 
update_changed_files: true}, (res) => - if res == "ok" - @wrapper.notifications.add "sign", "done", "#{inner_path} Signed and published!", 5000 - - return false - - # Close - @tag.find(".close").off("click touchend").on "click touchend", (e) => - @close() - return false - - @loadGlobe() - - close: -> - @move_lock = "x" - @startDrag() - @stopDrag() - - - onClosed: -> - $(window).off "resize" - $(window).on "resize", @resized - $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) => - if e.target == document.body and not $(document.body).hasClass("body-sidebar") and not $(document.body).hasClass("body-internals") - $(document.body).css("height", "auto").css("perspective", "").css("will-change", "").css("transition", "").off transitionEnd - @unloadGlobe() - - # We dont need site info anymore - @wrapper.setSiteInfo = @original_set_site_info - - - loadGlobe: => - console.log "loadGlobe", @tag.find(".globe")[0], @tag.find(".globe").hasClass("loading") - if @tag.find(".globe").hasClass("loading") - setTimeout (=> - if typeof(DAT) == "undefined" # Globe script not loaded, do it first - script_tag = $(" - - diff --git a/plugins/UiConfig/media/css/Config.css b/plugins/UiConfig/media/css/Config.css deleted file mode 100644 index 98291d33..00000000 --- a/plugins/UiConfig/media/css/Config.css +++ /dev/null @@ -1,68 +0,0 @@ -body { background-color: #EDF2F5; font-family: Roboto, 'Segoe UI', Arial, 'Helvetica Neue'; margin: 0px; padding: 0px; backface-visibility: hidden; } -h1, h2, h3, h4 { font-family: 'Roboto', Arial, sans-serif; font-weight: 200; font-size: 30px; margin: 0px; padding: 0px } -h2 { margin-top: 10px; } -h3 { font-weight: normal } -h1 { background: linear-gradient(33deg,#af3bff,#0d99c9); color: white; padding: 16px 30px; } -a { color: #9760F9 } -a:hover { text-decoration: none } - -.link { background-color: transparent; outline: 5px solid transparent; transition: all 0.3s } -.link:active { background-color: #EFEFEF; outline: 5px solid #EFEFEF; transition: none } - -.content { max-width: 800px; margin: auto; background-color: white; padding: 60px 20px; box-sizing: border-box; padding-bottom: 150px; } -.section { margin: 0px 10%; } -.config-items { font-size: 19px; margin-top: 25px; margin-bottom: 75px; } -.config-item { transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); position: relative; padding-bottom: 20px; padding-top: 10px; } -.config-item.hidden { opacity: 0; height: 0px; padding: 0px; } -.config-item .title { display: inline-block; line-height: 36px; } -.config-item .title h3 { font-size: 20px; font-weight: lighter; margin-right: 100px; } -.config-item .description { font-size: 14px; color: #666; line-height: 24px; } -.config-item .value { display: inline-block; white-space: nowrap; } -.config-item .value-right { right: 0px; position: absolute; } -.config-item .value-fullwidth { width: 100% } -.config-item .marker { - font-weight: bold; text-decoration: none; font-size: 25px; position: absolute; padding: 2px 15px; line-height: 32px; - opacity: 0; pointer-events: none; transition: all 0.6s; transform: scale(2); color: #9760F9; -} -.config-item .marker.visible { opacity: 1; pointer-events: all; transform: scale(1); } -.config-item .marker.changed { color: #2ecc71; } -.config-item .marker.pending { color: #ffa200; } - - -.input-text, .input-select { padding: 8px 18px; border: 1px solid #CCC; border-radius: 3px; font-size: 17px; box-sizing: border-box; } -.input-text:focus, .input-select:focus { border: 1px solid #3396ff; outline: 
none; } -.input-textarea { overflow-x: auto; overflow-y: hidden; white-space: pre; line-height: 22px; } - -.input-select { width: initial; font-size: 14px; padding-right: 10px; padding-left: 10px; } - -.value-right .input-text { text-align: right; width: 100px; } -.value-fullwidth .input-text { width: 100%; font-size: 14px; font-family: 'Segoe UI', Arial, 'Helvetica Neue'; } -.value-fullwidth { margin-top: 10px; } - -/* Checkbox */ -.checkbox-skin { background-color: #CCC; width: 50px; height: 24px; border-radius: 15px; transition: all 0.3s ease-in-out; display: inline-block; } -.checkbox-skin:before { - content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; border-radius: 100%; margin-top: 2px; margin-left: 2px; - transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -} -.checkbox { font-size: 14px; font-weight: normal; display: inline-block; cursor: pointer; margin-top: 5px; } -.checkbox .title { display: inline; line-height: 30px; vertical-align: 4px; margin-left: 11px } -.checkbox.checked .checkbox-skin:before { margin-left: 27px; } -.checkbox.checked .checkbox-skin { background-color: #2ECC71 } - -/* Bottom */ - -.bottom { - width: 100%; text-align: center; background-color: #ffffffde; padding: 25px; bottom: -120px; - transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); position: fixed; backface-visibility: hidden; box-sizing: border-box; -} -.bottom-content { max-width: 750px; width: 100%; margin: 0px auto; } -.bottom .button { float: right; } -.bottom.visible { bottom: 0px; box-shadow: 0px 0px 35px #dcdcdc; } -.bottom .title { padding: 10px 10px; color: #363636; float: left; text-transform: uppercase; letter-spacing: 1px; } -.bottom .title:before { content: "•"; display: inline-block; color: #2ecc71; font-size: 31px; vertical-align: -7px; margin-right: 8px; line-height: 25px; } -.bottom-restart .title:before { color: #ffa200; } - -.animate { transition: all 0.3s ease-out !important; } -.animate-back { transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; } -.animate-inout { transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; } \ No newline at end of file diff --git a/plugins/UiConfig/media/css/all.css b/plugins/UiConfig/media/css/all.css deleted file mode 100644 index 7bb0087a..00000000 --- a/plugins/UiConfig/media/css/all.css +++ /dev/null @@ -1,125 +0,0 @@ - - -/* ---- plugins/UiConfig/media/css/Config.css ---- */ - - -body { background-color: #EDF2F5; font-family: Roboto, 'Segoe UI', Arial, 'Helvetica Neue'; margin: 0px; padding: 0px; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; } -h1, h2, h3, h4 { font-family: 'Roboto', Arial, sans-serif; font-weight: 200; font-size: 30px; margin: 0px; padding: 0px } -h2 { margin-top: 10px; } -h3 { font-weight: normal } -h1 { background: -webkit-linear-gradient(33deg,#af3bff,#0d99c9);background: -moz-linear-gradient(33deg,#af3bff,#0d99c9);background: -o-linear-gradient(33deg,#af3bff,#0d99c9);background: -ms-linear-gradient(33deg,#af3bff,#0d99c9);background: linear-gradient(33deg,#af3bff,#0d99c9); color: white; padding: 16px 30px; } -a { color: #9760F9 } -a:hover { text-decoration: none } - -.link { background-color: transparent; outline: 5px solid transparent; -webkit-transition: all 0.3s ; -moz-transition: all 0.3s ; -o-transition: all 0.3s ; -ms-transition: all 0.3s ; transition: all 0.3s } -.link:active { background-color: #EFEFEF; outline: 
5px solid #EFEFEF; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none } - -.content { max-width: 800px; margin: auto; background-color: white; padding: 60px 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; padding-bottom: 150px; } -.section { margin: 0px 10%; } -.config-items { font-size: 19px; margin-top: 25px; margin-bottom: 75px; } -.config-item { -webkit-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -moz-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -o-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -ms-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1) ; position: relative; padding-bottom: 20px; padding-top: 10px; } -.config-item.hidden { opacity: 0; height: 0px; padding: 0px; } -.config-item .title { display: inline-block; line-height: 36px; } -.config-item .title h3 { font-size: 20px; font-weight: lighter; margin-right: 100px; } -.config-item .description { font-size: 14px; color: #666; line-height: 24px; } -.config-item .value { display: inline-block; white-space: nowrap; } -.config-item .value-right { right: 0px; position: absolute; } -.config-item .value-fullwidth { width: 100% } -.config-item .marker { - font-weight: bold; text-decoration: none; font-size: 25px; position: absolute; padding: 2px 15px; line-height: 32px; - opacity: 0; pointer-events: none; -webkit-transition: all 0.6s; -moz-transition: all 0.6s; -o-transition: all 0.6s; -ms-transition: all 0.6s; transition: all 0.6s ; -webkit-transform: scale(2); -moz-transform: scale(2); -o-transform: scale(2); -ms-transform: scale(2); transform: scale(2) ; color: #9760F9; -} -.config-item .marker.visible { opacity: 1; pointer-events: all; -webkit-transform: scale(1); -moz-transform: scale(1); -o-transform: scale(1); -ms-transform: scale(1); transform: scale(1) ; } -.config-item .marker.changed { color: #2ecc71; } -.config-item .marker.pending { color: #ffa200; } - - -.input-text, .input-select { padding: 8px 18px; border: 1px solid #CCC; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; font-size: 17px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; } -.input-text:focus, .input-select:focus { border: 1px solid #3396ff; outline: none; } -.input-textarea { overflow-x: auto; overflow-y: hidden; white-space: pre; line-height: 22px; } - -.input-select { width: initial; font-size: 14px; padding-right: 10px; padding-left: 10px; } - -.value-right .input-text { text-align: right; width: 100px; } -.value-fullwidth .input-text { width: 100%; font-size: 14px; font-family: 'Segoe UI', Arial, 'Helvetica Neue'; } -.value-fullwidth { margin-top: 10px; } - -/* Checkbox */ -.checkbox-skin { background-color: #CCC; width: 50px; height: 24px; -webkit-border-radius: 15px; -moz-border-radius: 15px; -o-border-radius: 15px; -ms-border-radius: 15px; border-radius: 15px ; -webkit-transition: all 0.3s ease-in-out; -moz-transition: all 0.3s ease-in-out; -o-transition: all 0.3s ease-in-out; -ms-transition: all 0.3s ease-in-out; transition: all 0.3s ease-in-out ; display: inline-block; } -.checkbox-skin:before { - content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; -webkit-border-radius: 100%; -moz-border-radius: 100%; 
-o-border-radius: 100%; -ms-border-radius: 100%; border-radius: 100% ; margin-top: 2px; margin-left: 2px; - -webkit-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -moz-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -o-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -ms-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86) ; -} -.checkbox { font-size: 14px; font-weight: normal; display: inline-block; cursor: pointer; margin-top: 5px; } -.checkbox .title { display: inline; line-height: 30px; vertical-align: 4px; margin-left: 11px } -.checkbox.checked .checkbox-skin:before { margin-left: 27px; } -.checkbox.checked .checkbox-skin { background-color: #2ECC71 } - -/* Bottom */ - -.bottom { - width: 100%; text-align: center; background-color: #ffffffde; padding: 25px; bottom: -120px; - -webkit-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -moz-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -o-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -ms-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1) ; position: fixed; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; -} -.bottom-content { max-width: 750px; width: 100%; margin: 0px auto; } -.bottom .button { float: right; } -.bottom.visible { bottom: 0px; -webkit-box-shadow: 0px 0px 35px #dcdcdc; -moz-box-shadow: 0px 0px 35px #dcdcdc; -o-box-shadow: 0px 0px 35px #dcdcdc; -ms-box-shadow: 0px 0px 35px #dcdcdc; box-shadow: 0px 0px 35px #dcdcdc ; } -.bottom .title { padding: 10px 10px; color: #363636; float: left; text-transform: uppercase; letter-spacing: 1px; } -.bottom .title:before { content: "•"; display: inline-block; color: #2ecc71; font-size: 31px; vertical-align: -7px; margin-right: 8px; line-height: 25px; } -.bottom-restart .title:before { color: #ffa200; } - -.animate { -webkit-transition: all 0.3s ease-out !important; -moz-transition: all 0.3s ease-out !important; -o-transition: all 0.3s ease-out !important; -ms-transition: all 0.3s ease-out !important; transition: all 0.3s ease-out !important ; } -.animate-back { -webkit-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -moz-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -o-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -ms-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important ; } -.animate-inout { -webkit-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -moz-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -o-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -ms-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important ; } - - -/* ---- plugins/UiConfig/media/css/button.css ---- */ - - -/* Button */ -.button { - background-color: #FFDC00; color: black; padding: 10px 20px; display: inline-block; background-position: left center; - -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; border-bottom: 2px solid #E8BE29; 
-webkit-transition: all 0.5s ease-out; -moz-transition: all 0.5s ease-out; -o-transition: all 0.5s ease-out; -ms-transition: all 0.5s ease-out; transition: all 0.5s ease-out ; text-decoration: none; -} -.button:hover { border-color: white; border-bottom: 2px solid #BD960C; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none ; background-color: #FDEB07 } -.button:active { position: relative; top: 1px } -.button.loading { - color: rgba(0,0,0,0); background: #999 url(../img/loading.gif) no-repeat center center; - -webkit-transition: all 0.5s ease-out ; -moz-transition: all 0.5s ease-out ; -o-transition: all 0.5s ease-out ; -ms-transition: all 0.5s ease-out ; transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666 -} -.button.disabled { color: #DDD; background-color: #999; pointer-events: none; border-bottom: 2px solid #666 } - - -/* ---- plugins/UiConfig/media/css/fonts.css ---- */ - - -/* Base64 encoder: http://www.motobit.com/util/base64-decoder-encoder.asp */ -/* Generated by Font Squirrel (http://www.fontsquirrel.com) on January 21, 2015 */ - - -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - src: - local('Roboto'), - url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAGfcABIAAAAAx5wAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHREVGAAABlAAAAEcAAABYB30Hd0dQT1MAAAHcAAAH8AAAFLywggk9R1NVQgAACcwAAACmAAABFMK7zVBPUy8yAAAKdAAAAFYAAABgoKexpmNtYXAAAArMAAADZAAABnjIFMucY3Z0IAAADjAAAABMAAAATCRBBuVmcGdtAAAOfAAAATsAAAG8Z/Rcq2dhc3AAAA+4AAAADAAAAAwACAATZ2x5ZgAAD8QAAE7fAACZfgdaOmpoZG14AABepAAAAJoAAAGo8AnZfGhlYWQAAF9AAAAANgAAADb4RqsOaGhlYQAAX3gAAAAgAAAAJAq6BzxobXR4AABfmAAAA4cAAAZwzpCM0GxvY2EAAGMgAAADKQAAAzowggjbbWF4cAAAZkwAAAAgAAAAIAPMAvluYW1lAABmbAAAAJkAAAEQEG8sqXBvc3QAAGcIAAAAEwAAACD/bQBkcHJlcAAAZxwAAAC9AAAA23Sgj+x4AQXBsQFBMQAFwHvRZg0bgEpnDXukA4AWYBvqv9O/E1RAUQ3NxcJSNM3A2lpsbcXBQZydxdVdPH3Fz1/RZSyZ5Ss9lqEL+AB4AWSOA4ydQRgAZ7a2bdu2bdu2bduI07hubF2s2gxqxbX+p7anzO5nIZCfkawkZ8/eA0dSfsa65QupPWf5rAU0Xzht5WI6kxMgihAy2GawQwY7BzkXzFq+mPLZJSAkO0NyVuEchXPXzjMfTU3eEJqGpv4IV0LrMD70DITBYWTcyh0Wh6LhdEgLR8O5UD3+U0wNP+I0/cv4OIvjvRlpHZ+SYvx/0uKd2YlP+t+TJHnBuWz/XPKmJP97x2f4U5MsTpC8+Efi6iSn46Qi58KVhP73kQ3kpgAlqEUd6lKP+jShKS1oSVva04FOdKYf/RnIMIYzgtGMZxLnucAlLnON69zkNne4yz3u84CHPOIxT3jKM17wkle85g0f+cwXvvKN3/whEjWYx7zms4CFLGIxS1jKMpazvBWsaCUrW8WqVrO6DW1vRzvb1e72so/97O8ABzrIwQ5xqMMd6WinOcNZrnCVq13jWte70e3udLd73edBD3nEox7zuCc8iZSIqiKjo9cExlKYbdEZclKIknQjRik9xkmSNHEc/9fY01Nr27Zt27Zt294HZ9u2bWttjGc1OHXc70Wt+tQb9fl2dkZmRuTUdBL5ExrDewn1Mq6YsX+YYkWOU23sksZYFqe7WqaGWapYtXfEp90vh3pH2dlViVSvy7kkRSnM9lH5BXZ8pBn+l7XcKrOvhzbaTm2xe8RZOy1uwak2imNvGn0TyD9qT5MvZ+9pMD2HUfsWy2QlhntyQyXYV+KW3CWVU/s0mJEba4Y9SZcv6HI3Xd6hy9t6yr6jYlfOOSpMVSlSVdVcC51jIVX5Df2ffCT5OLIN1FCt1JVZY9vnjME4TKBDgprStxk9W6ig0lXQmSfXWcC4CGv5vh4bsZn5LuzBf9g7VD4rKBcVbKBq+vPUmEod7Ig6WZo6owu6oR8GYIilaqglawT+w/xm3EruMWo8iW+p8x2+xw/4ET9hHzKom4ksnMN5XMBFXKJONnKQizz4YZbmCA5CEGqpThjCEYFIS3aiEG0DnRg74sQyxjHGMyYw+jjjIj8KojCKojhKojTKojwqojKqorE/z+nO2BO9MUb5nXGYgMn0nYrpmInZmIuF3GMLdtB7J713830v/mvJctXYflBTO6Vmlq4Wdljpdpj/4g/OOEzAPEt3FpBbhLV8X4+N2Mx8F/bgP5yLp9LTVMqgytdU+ZoqTzvjMAELmC/CZuzCHvyHffGqaZlqgmSkIBVpluk0xiRMwTTMwCzMYb20IuRTLDpZsjqjC7phAP6Dm/EI64/icTyBS+SykYNc5PEOfHCRHwVRGEVRHCVRGmVRHhVRGVU56yi/wiSFq6y261m9r1/kMOulwRqmUfQtyt3S1Rld0A0D8B/cjEvIRg5ykccb9cFFfhREYRRFcZREaZRFeVREZVTlbLT68emHkREchKA7eqI3a2Hy2Xq5eAxPgndPvgmSkYJUpLG/MSZhCqZhBmZhDuuuuqu0eqE3+tlqDbLd8jOarXYEByHojp7ojcG22xmK4RiJ0ZwJCe/NrRSxN/pFFVdhyb60bMuyzXbJXrNVlq04e8TuVVBhp0VYsn0S5P6T3nhKrpKCrp9qP1gan7daSjD1/
znsjDdmSMpvWQGrZAMyL3Nbwu5Qonx2j70vH+MzZCqKrD1nhe0/ds522Xbzkdlnx6+5e0pgd7x9bdaW2Vv2qf9pyeb4M+x7xj6WpHz6u0gEYRevq7vQjvtftzNXs5aNxvqbsNS/XcmmBmHfev8pgvEFlML3OHh1nfG4nRVhaVc+EwL+XnZek0m3k3Y341tKUpLttxNy5dq9ircaImsp9rnt432+ZB+y70rwVqlsGd7sB2wQWbwvwo56K6fpefU+3n7Fw8teH3ZehL2hGwrLvrGddvL6ftLfzb23f0E3FHazgguvny2+Mj8XsJ721786zgWE/Q8XFfh3uJB8lq6AsA3IuDLbF7Dq7Q8i6907+Ky4q7133XyzN34gr4t9aU9fsz5QwUWIGiiCR4rlceTjCZHLE6oKqqIwVVd9RauxWpLroE4qoi48xdWdp4T6qL9KaiBPWQ3lKafhGqny2srzB6PljBAAAEbh9+U6QJyybXPPWLJt27bdmK8SLpPtsd/zr/dcdaRzuX3weR9dvqmfrnUrfz1hoBxMsVIeNjioHk+81YkvvurBH3/1Ekig+ggmWP2EEaYBIojQIFFEaYgYYjRMHHEaIYEEjZJEisZII03LZJChFbLI0iqFFGqNYoq1Timl2qCccm1SSaW2qKZa29RSqx3qqdcujTRqj2aatU8rvTpgiCEdMcKIjhljTCdMMKlTplnRuZAJ87LVl/yp7D78f4KMZCjjr5kYyEKmMvuoDGWu19rpAlV6GACA8Lf19Xp/uf89XyA0hH1uM0wcJ5HGydnNxdVdTm80YAKznTm4GLGJrPgTxr9+h9F3+Bf8L47foQzSeKRSixbJMnkSverlDibRndmS3FmD9KnKIK9EbXrWI4U55Fmc0KJ7qDDvBUtLii3rOU3W6ZVuuFpDd39TO7dYekVhRi/sUvGPVHbSys0Y+ggXFJDmjbSPzVqlk8bV2V3Ogl4QocQUrEM9VnQOGMJ49FMU79z28lXnNcZgFbzF8Yf+6UVu4TnPf8vZIrdP7kzqZCd6CF4sqUIvzys9f/cam9eY9oKFOpUzW5/Vkip1L9bg7BC6O6agQJOKr2BysQi7vSdc5EV5eAFNizNiBAEYhb/3T+ykje1U08RsYtu2c5X4Nrv3Wo+a54eAErb4Qg+nH08UUUfe4vJCE21Lk1tN9K0tLzbhbmyuNTECySQCj81jx+M8j0X+w+31KU1Z7Hp4Pn9gIItuFocAwyEPkIdk0SD3p4wyWpjhCAGiCFGAIUz7OghSo4I8/ehXf/pH5KlcFWpUE3nBr8/jPGIYi5GmJmjiGCsIMZcC7Q8igwAAeAE1xTcBwlAABuEvvYhI0cDGxJYxqHg2mNhZ6RawggOE0Ntf7iTpMlrJyDbZhKj9OjkLMWL/XNSPuX6BHoZxHMx43HJ3QrGJdaIjpNPspNOJn5pGDpMAAHgBhdIDsCRJFIXhcxpjm7U5tm3bCK5tKzS2bdu2bdszNbb5mHveZq1CeyO+/tu3u6oAhAN5dMugqYDQXERCAwF8hbqIojiAtOiMqViIRdiC3TiCW3iMRKZnRhZiEZZlB77Pz9mZXTiEwzmNS/mENpQ7VCW0O3Q+dNGjV8fr5T33YkwWk8t4Jr+pbhqaX8xMM98sNMvMerMpfyZrodEuo13TtGsxtmIPjuI2nsAyAzOxMIuyHDvyA34R7JrKJdoVG8rx9y54tb2u3jPvhclscpg82lXtz10zzGyzQLvWmY1Ju0D7yt5ACbsdb9ltADJJWkkpySUK2ASxNqtNZiOJrxPv2fHQJH6ScDphd8Lu64Out7oeujb62gR/pD/MH+oP8n/3v/PrAH56SeWH/dDlxSD+O+/IZzJU5v/LA/nX6PEr/N9cdP6e4ziBkziF0ziDbjiMa7iOG7iJW7iN7uiBO7iLe7iv7+6JXniIR3iMJ3iKZ+iNPkhAIixBMoS+6McwI4wyGZOjPw5xFAbgCAayMquwKquxOmtgEGuyFmuzDuuyHuuzAQZjCBuyERuzCZuyGZvrfw5jC7ZkK7ZmG7bFcIzg+/yAH/MTfsrPcBTHcBbPqauHXdmN7/I9fsiPOAYrORrrkQaa8FG4aSvBgJI2EBYjnSUiUwMHZJoslI9lUeCgLJYt8r1slV1yXHYHuskeOSLn5GjgsByT03JNzshZ6S7n5JLckctyRXqKLzflodwK9Jbb8lheyJNAH3kqryRBXssb6Ssx7jmG1cRAf7EA00sKyeDgkJoxMEoySSHJKYUdDFCLODiiFpWyUkrKORiolpcqUlmqOhikVpO6UlPqSX0Ag9UG0kwaSnNp4a54tpR27jHbSwcAw9WO8n7w2gfyYfD4I/lUPpbP5HMAR9UvpLN7zC4ORqpDHIxShzsYrU6VaQDGqEtkKYBx6pNAf4l1cFaNc/BcjRfr9oVySE6A76q5JDfAD9UqDiaoux1MVM87mKpedDAd8CAEOEitLXUADlC7Si+A3dVnov3sq76QGPffTGbJAmCOmkNyAZin5hEPwEI1v4MlajWpDmCp2tDBcvUXByvUGQ7HqDMdrFRny3wAq9QFDkerCx2sV5c52KCuEz2HjWqSTQA2A/kzOdj6B09lNjIAKgCdAIAAigB4ANQAZABOAFoAhwBgAFYANAI8ALwAxAAAABT+YAAUApsAIAMhAAsEOgAUBI0AEAWwABQGGAAVAaYAEQbAAA4AAAAAeAFdjgUOE0EUhmeoW0IUqc1UkZk0LsQqu8Wh3nm4W4wD4E7tLP9Gt9Eep4fAVvCR5+/LD6bOIzUwDucbcvn393hXdFKRmzc0uBLCfmyB39I4oMBPSI2IEn1E6v2RqZJYiMXZewvRF49u30O0HnivcX9BLQE2No89OzESbcr/Du8TndKI+phogFmQB3gSAAIflFpfNWLqvECkMTBDg1dWHm2L8lIKG7uBwc7KSyKN+G+Nnn/++HCoNqEQP6GRDAljg3YejBaLMKtKvFos8osq/c53/+YuZ/8X2n8XEKnbLn81CDqvqjLvF6qyKj2FZGmk1PmxsT2JkjTSCjVbI6NQ91xWOU3+SSzGZttmUXbXTbJPE7Nltcj+KeVR9eDik3uQ/a6Rh8gptD+5gl0xTp1Z+S2rR/YW6R+/xokBAAABAAIACAAC//8AD3gBjHoHeBPHFu45s0WSC15JlmWqLQtLdAOybEhPXqhphBvqvfSSZzqG0LvB2DTTYgyhpoFNAsumAgnYN/QW0et1ICHd6Y1ijd/MykZap3wvXzyjmS3zn39OnQUkGAogNJFUEEAGC8RAHIzXYhSr1dZejVFUCPBW1luL3sYGQIUOvVWSVn8XafBQH30AbADKQ300kQB7UpNCnSnUmfVuV1TMr1pMaCZW71Si7KoT82vrNi6X1SVYEa0ouNCPLqFJ8AFyIIN+T/dgzE0iUIokGJTUO69KpuBMMvmulUwJ9if980h/ILC56jecrksQA2l/AS6aDaI5OFmKat7bdan+r300lAkD0LoNugWfkJ7RNiFeTvHgv7fG/vdo5qh27UZl4kui486bLR98sO/99wOBPNFG3DKAyDiq
C6qQppEoQRchTTUFVEFRzQH2NsFt90m8QUejsbgE6/BWmkLX4fd5vAECkwHEswxtfUiCghDaGAYwpgatwgYKG4TlUKoH9digHpejYQwHP0NtmJaogVAjkyoG1IZ8r3gbHWBia+bwxWhFrRPgrS2gmhU1Xr8rIaCCoibqM404fhfD7va77C725xP4n8/h1v/cApslQXqrW0G3H9DSgVJs2L2gO5q7L+9+4ssON+52W74RzR3oLVxHh+O6fBy8GDfTgfxvMd2YT4cTNw4GQBhT1Vq0yuuhOQwPSW9hYllqBE5hgxQuI0mxcHotihoT4K3CW82O9wQiilY3PEpR1KQAbz281Zreu8KESvd4PR5/ekam3+dISHC40z3uFNkRnyCyQbxscrj97LIvPsHXNkPoPXft+Y/2b31x2973c7Mnz1qAbbY/e/y91XvO7l6Zm1OIk/8zy/fo6S2vnom/es1ZcXLp69PHDJ86ZPLGEcWn7Pv3W788tLhwFkiQVfWtlCMdhFioBx5Ih3YwJSSrwMQTamR1s4Gbycq1JyqgRqVpVrEaNp/TEsMjt6I2DLD9Zj+0ZuHphorW5t5I87t1jfSnaZmCm//KTGvdxp6e4Wub4GCCulM8fqcupd+f7mEMYHpGsn4lOfIC50byojNra86C17bOnVeyqHfXTr16ru5J7t+K8rattJLPdO7Zq0unPtSURQ5niUU5JdvzOs3funWx6elhg3t0eXr48O6Vp3OKty3ulFO8dbH8zLAhPbo+M3TIc788JmY/BgIMq6oQf5EOQCPwgg8W/IUeNGCDBjWKn8gGiVwpUhpwpdCaWRrwTkhpxjulWQrvrKFJe+iWuqEuwVqXE9FA0ZLwHk+uJKuuWoy8sJpwojK5mnC6uFqYMIMphcnp9sqMusZS20w0ca0R4p2ZGRkhooa98Nqgxw5sKzzQZ+xIfPzxrdMD5YO6Hn7+PKV4cdU0usG1dW3KpEmPtx36ZPeBuDBLfWHS8k6vf7BzQe8Xuz9DZ87bVLXt9oTHOnz6xDgsTpw+b9Iy4fOBy//VutdD/6fPWEB4XnRBUPc5SsjjSNUeh4HlPibomIsvSivocvwEEBbQZuRFeSRYwQJqnTRV1DffZst0ykQwKfYEp8njJQum/jjXs3KvBZf2eMGzYGoFeeZT3IzPdZw2jqbTz3rQWfRmycDxXXfgcwAIHvbOzFrvxHhCTN4Mm92fTog3M8FmI5kv/DTfu24v6b1hsHf+D5NJh0/o8/T1LuMn4U+YlnwGs7BRt/FdaAkdCggNyCChh6RCHUgO7bvIdlfU9z1QlwWSRNXCektaIlsqNVNi7jnVKdlNguDFrvRMK2xlWRuFTVvRk4dm7Hl7pnCx75px2Ju+Mqbo3/Sn/phMv/w3R/40rBTTxXchGuoBe5kKuvuQMWxfurtzuKxuK3N2Vh/ZiIV0xB46Agv3CLE7aTqe2InFgNCQlmM6XAUzOPmbNPFeEOEvBc6yV3ct8XJuVn/xnSG0vHPO4q0rhh3jOFJJEokl74LAOGQ7p2GkY2ILk1iaiF+RpDWAsJzFsUlwmnFdP8SMiTFj0p2hFH4qk0crBw9Xy9tn339/dvtBrR95pHWrhx4CBFtVjqDokdAODFpkKGRPOt3o27WJDNw4U24JQGACs8IoZoWxbL32oRWj2M1R7Oaws+I2GKVoVjR4pkgpFOJOIYJfsfna2uxe3S5MVt2dZIpR5RVfXxfLv/u2XNg9v2DZPJK/OH+BQEbTvfQA+tH3Bz6K7ehZeij224sXyumlihvnbgJCCQC5LL0Hcg0uiUGR/pxsgMQNQkzThLB1E4FPspzCbZX8qT5yeQ9dTGwNxdP52w4DIPQDEH1Maic8BcaAa3i3MyLSBDRBcfKVFEWzhOcVHps0h1MJrefyY41fYDGmse5GEF2ir7Ij3hrXY9GERWt3o3D5eAVLa6aRqwtI69mbemSv3LDk6K3zuy7Si7QPIPSvqhBuM3SemogRywDF1qCrywZ1OTqI1f0apGkfA/bTNgGO19L4rwGA2WqsQdNj9cwNFM0TJsnuAf58XUVtEGCtlhS5oT4mhhKSosYZ8kgpJjcORUkupNeNuYtzCqumFOwOfnTqm+kjpuRUAR1Oq/YUzspdtn7VYqEtyc1GyB//5udX/jtAa+FRZx/4ovzdCYuW5MzOI0DADyB2Y7oaBXWgizEChN0ClxUtIseKzAGGhWJZDvIsRzPL0XpCqd/EwTvcukmjD11Wk5B77NieYBZZcjA4Fw8m4Ndr6A7sPlr4qbI9OdYEENYxG2jJUDSEQSEMyJZFhiFMPrcAVDQxzJ4pFjkiU5pWLzwpmeqxSc62NcB3ID4M1sSjN/MTduZvBEapzRFPWDT2+hKq2XSnmEynupJvgm+1GJl3+JtfrpT9at1pXT5p7qpN86d2aEOukAvb6YSH6e3rN2jwwoczZ6svrdzlbwIE5jP8DaRdEA8u5vPCKlxbAr7/GCkBVEvgiFQUrUGkHjjcsmi6Bxf8fgVSBWbcjholEJ5JuVQF8RMO7/vst1OnaSX2wn+dGbA56eWpMwtWSLs2iLduzKe/nrtBf8ZHg51wJRZLwXHZPR9/+9r7LxbuBmQWCGIqY1+GtkY7D28Fxy4pkQYO1QaO6OYeVEwNvvZf0qeyQrgkdb7zvpRYBCDAOMZLHd3KXdC8Zm8d7IUO9vawsnH98locnAsvsyUv9ovcUqGel+tWnFffWUukmagORUuJJCtkJKEsKyKTEHimpfOFes7ZNoPRVjFhcPaCqsCZ4NzsQeMqykq/W/PSnTWrcuatpt+MXrigfMEiMX10Ses2H0z+8PqNDybta9O6ZNT7ly5Vbpm2rujWsgKx3sKJY/Pzy5cAEBhaVSXc0uVsDL0hXO7USGlnAzuXUrBzO+FpBAj6L7tBRQ1OXY2u5RF4BqRLxLXB6lBAcvuZl0hlLt5fk00LD923ZeCsvcPHnsi7dJuq9M3G3s9/p9/329B449RpqwvInA7PzbiRt/KbGfRD+nUG7UWnSuvFL+9kP9f13Zt7175YBlVVkMsi4GjxcfCA7XdAE4tnfwgTQInwhIk8kLE7m7Ko3IPd6WX3fCJMQBmUGAAlIsvW7wSEzvCRME3sCjIkROgYu8r8up5LoeRAPzrQTLIrTzG3NT94AKevxGkHOL9FWCBcET4GAUyQCsxgWOKgkxhp3ZpYK6rzlEK4UrlPeIz/Ca22BEs3AyDkwgHhmvhEGIsenDkWKaBKHIuOxC/UD44UelaWkEUo7KO5K+mCUiDwRNVvwiS214nggmf/InYls0Ey3+v6UthY6itchUUF/jZ+QSh+seCVmXkvfmWEPL+Jpbzh8ngYaftUznNjsobP2E0+e/fDsy+P7lJWXS2vm7zouYUDRmdNHvXvlw8f37WzZNSzRfSj6vIZCIyg98sXpDXgh8fg/4LaNpSbmBlis14BBbS4tmYOMS5Nk8xx/JdZ0dqTsL0F1LaKVj88wUrWZgG1WZrmDs/FKdojJFJvmd/y6sqbmWHjEjkFmeclNnCliMQk20Q+cuoJPrHbbCxoiza
U9dwl086ZkI/FXHpnrz9jcddlK+1xU/dnPTunW7p91fglsp3uptpReuTt6Jjl6D3d950HUh86mXWHFr0VE1OOM364jUN33P25zrO9HxjbGFu1e+SFtfj7z/SrbT3+9dXJ11BY3fzh4IUvr7+NC7DoMM37/RZdVdbCPcHb9gZuxfpox/d+uE770uXLioYPsOAfDb/nLDYAkBpKKpggCjrWzp5rHxfIbCBzdbCIRPdfkVqrRemToZIffehmvXAyuDH/EGmxjbQ8GHwKf7iFM+h8dujSjdQjxSBAMYCYp2fuCZAEPQzxsnb2BHqEdKZpceElzXE8ieKRSAkrIRpdjc/qCmccshvZkCUjrlRXKE66ivHadz9MHDopn35FD+ODuS/RT2kppsxas6SA3pTUA6XDNzR37Z5z4DopDv66eBqa1s0aNWU0AMJkFhEuSQcYhx2MftKY67ITkrgAd4A2g3OsGzliSRNXLtGdDFZ/OtcacLo9TF0Iq6ZteuJ7qT698T2l9OgKjNr5FSY6y+puLXz/9CFt8/YGeOrLu5iNGUuOY/prNPj5jvX0x7tLv6NfrXgbiM7yIcZyNDig/T9wzJmLCaNirMbW4lG0OVnkFk2ClXltVtoTbzG+tA8bb8JN9PKBs8fK//j6gqRuo8eO9jtFj71OJNvdxRhf1eMW2gkA6kg66kiehrBG/Sk/ixZlvq3RBqcoKoZsTdHMBhdpdTmq/4TrwXzyv8ohwqpgSzKZbAlWbpDUjbRF9fppbH0LPPIPuq5ZiBhW74j1ZeOK7ur1TgQ3lAq5wfvIEJITnMnXqgMI05h2XGPakQSD/7+04+/qIa1RKLo2Sns7rlFSI9Lv7YcbPcM6rWEEmlRZ5A7H61eA7ZLTTVwpRKjWHB46xGtd6R+qRivWEPRhwk1MSCrNoOVlh/H6/lEv++lOouwfkbUV04/Pxi444usL6KI/0arJv9FPWrfHTutD3Elmfe96GPfOUOYZFMqwqyrwqoGTusmC2VqaBftFbKheXXFKfaz1SeayYEppKSkvY9s3QFKDy0g215/3WDNZr0Yb/sORsf4uH04uLZVU/pSfVUAn2M84aGXMZ8PBm+Nj4KRIA+CpvzWUfvlCxacQXXb39OWfS/PnTV6Fknr39umK8iMzlxQuhGp+JJ2ficbMM1x411Y041kyEJ6FPmLtCn1hBEyDRbAOSmAPmPtp7YGRJUuEX7dnyB3lnvJweZKcKxfKr8vvypZ+DKtJJw99iG5SX2PkLfwq+BEZ8QV5bTeNZxS2JoHgzMqz1VbQgCGVoMk/WQFE6hfXdB+OIFrl0rINzJ6qJZa76967j5FXw9YYlMAQo8Mn1Xw5BFE/4A91URCqvizEx+SyoxvtrMcteA2v3S610ZRV1G0vZXvwH/FVFk4yydC7w8Si4KbgUY4trK0WeFLDKG5Axk0JA6mtPQbz1IgEOiq944qFnGYMqai7rIx8sl8cfHcjA7JWfB4ITKqqkCzM6q2QBO2N9baRiFglslASaxVK8aTantNDGYTDq5+JmHSTtmVKluX0lvoG/X0VWYnRb+zE6OX7A3vfPS2c3b3nhECKL9CybcXY/lTWGXxsezHdf56ggA767e8j79IbGBeE6qhQqlfLdnhKi4rXS5YonsBBmILahZMWLeCfXbMQjm0cPaeIeSFW37uro6zXhVmlpO4PGEf/+IMWY591r75aQNeT+4IsLv169NznG1bkz1svAIHRVVGSzPhzQApDZXY3DuVtat1qVFYGxGrYP45KMFv5fVZDVGXZXrKRU5NkSpX/jtdkRivmTkUxh57s3O0etyrjtvTkvndOC6dxIuf2LP2454mpv9ru8VtCy84j+8/J+b1Dr1fzuw1APKpbhxMGaVKifrwi8S8k/2B0hgpbU0JplmJIs6J1y+Aak2AMR9WkyyZ0uLGGd7KflpThp7+jZVUO9jwVHIPeguItRfQKeSr4lqRev5B3rG2wMIZ8s3rGwuUIgNCNxa1sfl7EUIO3CVvL4O6NH45UmR+ZsFarE0boqaeHb4+hHKzHP6ew1ljj8hKQbcSfvqFw7a9xu+ke0vOPG2i/Vvjt3LJta5dtWoMjTw6hFV8WUuaMPnql6OVCkt/p46I3bkw8MXX+mplj+0wfPv3VsbvOTzgye/7aGRde4FK1ARDX6HluK6M4RvplxRDyA9XE8gi6hrbYT1uKwyXbne8l20ZAWMKYKmHvtMEDmmSPZzIb3aDhBMoQa7Q6BnORwWRKAS9z36FzEKtYgrTqmu8HepPs27HllTcltTLlFL2jECSfCtcrPRt37tgoXAVAnr+LQf28o50GJl7vGBM8g9MzujZAQfdpqXqy7iPs69qZ4M2S4Oenq8Rdd7qF/OiDAPJ3uox9DG7B6EANphnOB2oUOo4N4nQfL0RxbyqHuli9YwQ4M9HHGjvH4TVxMPhZg6aY/DLWbZL0aRndtJOeczrp0Z10cykeL31TuFVpVg8IN+90E1PHjr17leFDaA8gntLj70gjBWE8tZ2w8UgcUOTx1ZILhfA6vAsiC7nVU/nyWrlY3i2zKQFkjt0iQwi7HnD1/31kPvb7lKbjxZt0HS36DC9R3w1hHmkVbBVMIe2CR0g5OcM5jWNI9zKkZmhjRBrGY0AaBhdajwdCHxmGM67QqFIadY2cJ1crxwZvkCRhBX9/TwBxmh77Hoe/Tz4ifYoI3NHwcwcpPGmRTGwyFPv9/AzCge2FR+9eExpV/iD8sWHDcnHexqV8vZX0CImW54AJUoAhVk2182YhUttZ+ORZM4nev58uxKnSV7enFJne5+9pwr41tKv51kDSIm2JPci1o4lKBqqSeptnMRZ6BHP0VVP1uzFNJZH4VTQm7HZ+hsKSCQtOo7llZfKcW52L5Dy+7iPkshCv25DXYENhVQ9oaOLGwheRuFOornBL9r2BzWdjs+3iXtqIXAw2BQSxKksoAgAB6ke8pnZCJfHznKLKUcLqNWuAa694Ca9IFARwg4q8yMV+9z5foRI6WXo7jiQRwpM9vvyVTZR+wh7zgB43K4RvxKehETSBqZqzaTO9WFbU5Opo42QgnIm19d9QYROnnnlF845HePZ4ZK1ti3ZWx50kw7GeOzKH93h5vsx9uu/edwv94MdpjXc69NM9dzI/2muiRM19a/NJxK/fnjh+SO6eCQcn7T0nemh0r/XuFfSNicndc99ZXLy3x6AJQzs9u6b33ldpnRd7K0v7di4/3GswEN33JssAdaAuDNVs9epzbDZFFQLAvFI4s0w0er1a5xiSWdCTzRjeqTG1S3SnMX1gJz8mnmNnJNusXi6dycrdtZh8s/TkOEvJ7nG46Mbulfnvdevx9oLVxHqLnl0xU4bgR4vpBRqUPjxVQluUnAKE/7C9qmB71RC6aEqjJLZ0xNFbYu3cBiIzGiYfP2SLZ60RHqfWV4dBBKu/mnG3R98AxjZ5aMhq805p0sEx/6N3J15e/e5P5p3mgqylL63LmdK337ah6EVI2vh73pUdWQuPl7r3HuMaNYCh/FEGiIN6jOHE+g04RYkhhuU0w6moIZE3op
eEGJ1hveMM2//2s589neW2TsavmysRCf0DgkwrF2JAxf59Y3eXWMYe+uC73UW56rP/eiOviHhuY9o8kn4HJuZh+i3T+4GN+NPaMxx7P4b9F8awg3GcpZl1jjl7LPcKw0usbQD1zMDvq5f29v56H9cj/WodhigRH7tCd5qNOZiUAv57J9quhITQSSCmyCaX3+MhT12jFdP/N/fsN0G3+NaiwXm+8Xn08rgiG2lkzotH188pW4IF9BsafGrzwW6P9T4tHHtlVZ2lLwHCAwDkmOxg0gzR4hK4FUZI0ShSwRMjQ3Ft+TjfaEiPYyOdpWoPML3i5zzsJF7/1OA0hRSIfwD7cvv2PSWPPByV5u87+Msvhe0FY3fssxZasgZnF1T2AAIDaU/hZ8Z4XWgMOVpKqofzk8KTQzDAC9tfYmT9a+ODGjcV0hsup/b/uHsP8CiO5H24umdmV1mbFwSKC1qSESjawiByjiYbBJIJJgsRDrCQwRiTBAibIJJE8JGxEWPSioyJ4mxEOM5gnI/D2RecpW193T0rNL3Ahef7PekvPTubd7t7qqqr3nqrNtzJQjcRHlHt/DlmniIFYYp7RJjSfAG8O03jojC5SqsVq6yvz17MCdzz242Zn7bKmrV/cVHOmVPflK1bfOC5gXsXU/nyoqbLZ1d+euOfowfnrF6/LHM+SvzX0etb0Peb+D6+HED6xABgpnocZLHy82JKEFB4wevjd8LonbDacJ/tWUF6M5OaFMMiXa67PKRHnfIuoMGSB43PeX5JvMcjHS0i+d4U/KeZU7N6VzE2Bwa2DY9TznO+WhvVEBpGP5m55kjPrHtEHnANScigCDCMjr420OO5rOHxcjqKfqpNm+effRZw9WnSAw2l3xcCDmbDnHV4mMK4ffAE00tPsA6wo4aAwe/2BNWk6B1hU2ycO0VzgSUmgdogepD7rZNjktu0s6alpNKxpMrpld3IZcuagA795eMoulkGHxYgtg5yiAHouGbqgiymIqLWPxmDCeAYiz0d/FGYcgii/qDv6UchmIuGoFoQJk1zCstmeDyjUL/PyDB0+w76aQ5ZaICqkbPQaPKsdxkg2AyABhrAD82Keiyaxc6EAdgcCwAMs/nuMUuVuWUTNewJBk5Qt5p52+gdW82devROPe6lB/AEuMKvSgMEcL0O836czDik+iRVo2ewG644doXSlVnlXzyX+tYf0GiDZ0L+i0uCyx4c6eCR02cvf7t3FlnsbYrLZ0zPG+dNxBe+3VT1tZxeo0t0VmborwZbrOKsxIkIm/ijEQZzz5k1CNZrldNfrVArw9zLOrWS05ds1qsVHRRgGEa9jGQ6qnCoBx3UkPqRPg6rVR/D+2+AqlVwfuuKjDC6dMAYctQUQQ1Hji/hsPxPCj9C5jmfvXGP/FC2a/mKnXuWL92N3VvIMvI+CS2pXI4SqwIP3f3okvrRXeYBkSw5io8tAqaoVm1/tjL8RtBBXRQqrJzFPxxUQkRf6DE7tegLMVFnkiA6Q1Gfn72Q69kTmHvl3S88m5fsHtB/32vF2PwLuZHv/UW5O3s5uUt+l4/eWuutXHOT+xkkS/rBN4+Jop/xH3YOLuQWYfX9PY7/6G6kMXjxEXfj6wtncgKoQ1d2/itP8Ws7Bg/ZvqgEx1ejxq9M/j0ey7NRy6qAsltvYEvhnzXZxUV0BqHQWZXDWKZRB/gLg/XbEbj/jHURV7CPh8CX07e8TlzUpOWRdp5D0rBdqfWlNcZNXpDT818PA8R9tONyb47VBGpYjXC6BeKjKtWvIcCGUhxeUGtJQCPrm0pjK+hRbSCSXhvUcBD8Ga88l69xTyScSx7s6PPZgWP3y155Ycy0Cci+v/+XngWXcz1KwbTx81B0j/7PDpjR97Vjp9b0nDKkS4eObQbNGfz6geE7sjInD2RxXfW3eJDSFuwwUg1zOEVEo46ehFDnUU6NRqBjoZ8ksFAC9FNldBoLs2Nm5tnw027nYQvzfMxocXl5aruYp7t1mvvyhQtKW/J7oTe7XbuQdbZ1y/CWQmQABEvout+jJsJErRXFMESMTBiWuN3oCdka6Qo/xgdoyAbD0SAmkFRApUaTrr91GHku3+rsKZ0478oFfMbb6ecSyVp5EQBBLIBUJqc/HgMSRK7OIxiQImBAlF0ZcpLMXUFmn6yUMiovMiuIoCmAcpPeDIEsVQkN8/98Ub5FyX9y6AXBEt9ktKugYN84OAbEhmK1JsndKzzkwjryWzWsIxeP/blqbbXUqvKilFz1Jzm96rbUBBA0BpDK6diCob8wKB3qU+ffoz5BMoek+NUj6I6VbeSSxNAd9MvfPyAlaPLt33//C5pMSm7jA6jA+5X3I7SWTMQu7AQEDtJDKqWjCadeEZjM/iul8wCF08KcIwhjuq8nUwDTU20M2OV2pzgZhYCO4/uqi6TXmHuuTokjxsc1Ji+Xo3CpaWU0+acUuk7uOWaK3BwQDAGQ3qEjETGgOv8HGFA6nlO1Aw/0HpKSi4qWSHU3vMoxFPIGLjG0hjrQUrXWjeAzD02guqgjhkUbWRZLqo2iDPzDOQqckuxKSUxJSWURk5myRCiL3OLEsw++c+sWPvBO/PVdu6T3yRuJ909c+tfr/6w4+lnS9A7kb+VfDH3+/vvku/ZsBAcoJ6zjE5mqiPlQHdeuJf80nGKvttLxTvONV9HGyyCPOpQxH8y9WTMdr5mO11I7XsVi5uN1plKmchods4nGFQ6aEU+yx7Et3Wi9ajx8+Hr8QRXdunX4QGU7FHTvwYDnvrqKIjpMT/zMc+OH1/9VfuLzRPb9r6I35B+kOHBCe9XMcwNQ68g4OOZUGs4DfVuC3paF+9uyYCYizAI3x8wiG7l9djipsKTIPxxf2nX+nu5Neg/Ydqyg5/LStpE9R0qBJXdS1jSYOAJvfb/ttiA8YyRgKCDr0Vi5F48fEnXxA1QwaE1QaaHkBTNtYdCc1WVlrjqLG/bufljxgvdXfqv09EUNiNYwBFMmajzEwnMqxLnYnGu90Dr+wLGxQg99BHHow8ZsNzvWYUe1nj8AYtBqLzAVJwuvzRBQkO6jKQpiuLjK887l8oOedWcMGgiy6dU5Q1++EvHV13Go/j3XLRQZ+/knzlvraqAQBMMAZBZdxcJctb7/uB+B9qNtPK6LTlBHRtM8d2E0ylVPR6NM/WwE+iGr9gmo0NS9NJrRAR4/Q+S0GWONsYwml5bipluVJOzFlAqKzga0wR+hyl97NUrEATu2Bv50+dTHp+fljF8QiDLwlHsbhxUXB76aFfBRMZIvfX/r4MS5G/NJVTEApufmvjJM/gfUgyaQoeKmzbR9qdRdAeL+ZapgMS4WUECKRbn99i+30Z0WT7XEncZ9mDSnkXG/nEZkczgSOamZc6HkPluuX9uyaEHBuKmrF6wueff8lrULi6aMLVxYlTX9/Ofnc3MvTM09P33qwgVLFq/YXP7+m0VL1s2es37pxjevnt+yagnOy7v1Ut7NvJduzpl9i2lVNIBMkyXgqMkBOOiwHUISs76/vxhulZqqEOKgEz4Ubo224sxSKxM2elQtWEcPZ
vpoZEc1DNfKZQXH5Bnv317D/ef/KAmPRZM+JCPQ02Q+mk/mnyWLGPKMniEj7klheLu3Rf6OueQUaj93Rz6uYOdgNbVgvbgFM0IdZsOERJWqIKkp1TXqEDDXcHVZWRk1+c6qr6TL+GfA8Dwxy3OolCZDR5ivujp1phNiVT4ptYgoLw9iH+UI4NU8DpOaoaO5OzJ8MFkYFUgBcWnh4ky6FiY1rfbByLQW/CuYkPAqIiFC0AjezJGJT0l7yPFujqlM+JJ+cq0X6ZCjcEOKHWu3nVw+5DllnbqSqr9OvdK5oOzQ5iU7V14/cibzSPsuKPjjL5Hs2V2wctvTi1H0ntx072fP9+jbI/U1VL9Z7wEF6MDJgS2XjN596elnct/DC4pmZg0d36ZFzqacsiH04Z2XP38vf9P0Fzr1bde3a/Yr++rUs47p1Llv++fMtjGdhkxm52Gs/Hf8g3IBKMgHkYyhqauWYNlOo0nTAh7PaRhFw5obY33sxbe1a2UYJSxS69fUZwRBgmG0kutvynmuac/AWtWd3oqThZnMsWOqT+Oa05PVvEZaU+mdVO7DpzbXSLeHwqVoCWeqQc1TeeI+4RAEmYLoA2FBEi9ewkLg8/CeWo9n3UpTaXa8tuyrOdVgWX/6uD8sOvs+knZDm4Xy9i2U/NXAxSiPNJMeQxPpPsaCPPKtkuKTpzdt3f/GyGEjJk0aMTzTi7YiK2qLLFtLyHfbtpJvt0w/jnqg+aj78UPk8MUL5PARPHDDtptHppTe/OPaUQOX5eXOXjZgzML95MOdO1HD/XtR3K4d5N7ecvT8pUtkZ/kFsvv6NTSEawx+Rwrna9kQJqlh8W42szDGjRfp2aocb9fqOlguB8t2nujgV2zXt1OVrt3mzcHscU7JkPSJjhj9AtUkOlJZooOtjltbK5rm0LIcTJbxhBBDz/mzFuzaP2lupz7b9i99bWME+WPTIfWn9h+Kz8bFD5r7Ys7s5MWpSSEvLihcRM5n98trVG8lykgaQfnIY6FIGi29A/FQ+jsBI5SijtUEEMxDs6RTUgwoEMGzbaiCGjaRHcfcHU4YPlXmzZMy0CwUsA1keJ5K3n26WmEQBcnQGvaoqW24yqcyN4IdrfzoEhkgfhCZVagorFdbLBjDfXjKGVbjNMZaHJXJOFMclcmUmDhfHeHpFJR5CFJMKfTR6FqhbBSdwt9rKk2oKE1IYAWXrbEuVheFLM3GaLa1Mqgws8vJxcwbc9pd8cnueLc7SSuecT3vL27TqUBu3YZsxcXkWy6Q6MwKZNuwZ/5LyPx6mGSaXrq565Deo5fhO34yd4nJ5B4Ut38fimUy+RN5W+r3an5eu8SNrQfFmxp4zFnyfNw+tVtrAASzlVipPbfnZuDFJpLI6Zbae1NxuRJbCBgWSGfwXHpugsEBCeLys3LVkAQ1EAt8G2F1uOhxnXXWwEk2x4K1E8atXj1u/Lrq1O7dU9N69JDPjNu8afyEdescXZ5J79FnUnfAkA0g/ST/C4IhHDqzajQxog40Pa7OrTRU4HsoYQa2eQYr9RScKdbA8YK0pWgSWbOLzEOv7ELtqk5KHaRBReQFVFKEiitD17OVao834X3KcXDAADWAo8lQGyoJBC0b272wUEgV5tC0Xg2ofTyMV/LYHMyR5YuNauuoWImqLRzH4n3ePajZ5LbP9uhSvAsFbJw4oBQV4k2TUMTYTi1b93xm2pp5U8ZN7PM6IGiDC/FGpQziYaka424kjk8opWLjg7phWinVkRyYB4UgZaoZgHKPhEM0JICklVSxARtxLXk6rK6PyRxfq1E2XlOlRmqfV5eaID0VXdtSxaoqnxQ8rKpyu1DggO5dMzo/06P4zblLN3duv3bvkoU7S/p06Nxt8xB5TOsWT6UnNX4hb864tGF1GxdOyH954lPPPpuUy9m6efIHuH5NThrTnDRGmRrAcohNBWcyB1GiOWqJl1ayyP3ZT8mPaxVC7rL3b6TI3vdyOligrxoq8GN0MK4Ql3JgxOJPg5J15CdjqHZGzQ6O1mnJQo5Fov7oxRmX2pTtCszcu7ofBXS9i9/cvF6Kqbw4fXE30lS5Cwg6AEhtOeetqYqDQ8RM2iOUcwQBGunPTI0Oc1lizXjRgL+RX1DQ31AoDiC3/1z9e18209V4IpojdYNAcKiSj22IEw4G0HF/UO8eV9GaEsvVWoklvsNqLBMyqGDADNIL7QWWy26nKuEmcZ1MfqDtIavBZaDGE3GI4qDR9xWlSEMLYjURcGvuVhqKDNmwtdDYZ3DbF2KS672RnTsxOaFZk8BFjJ+Mt6MfeEVkWxUx1OiJhZE2sTAS+xdGst3GSAsj0Q/FH6BRFrwdD31m/kwATL9Dldw8TxRBv0XSsF2JuU+iiVOD6kmaF6OaJCEDL/mZucdWlxtfOrFx04nj5E+n3swe0H9kdv9+WVgeVfLu2Z3dt5w7t8Mwetr0Mb1HTZuSDXxfXS/Nlg5DPBwMBTDCQTQB2OMDAZTXlbfADReqP8Tr6bWK6kAAMsJlfBsATOLy8JqhvgDKFf4eFb6FAP7e23g9MsJFKYq/R+CA8ffkACjfKcf55xfx91yWGCRghEvQEm+qeU8sfU8sfw9g6EjmSbNpfF4H4mCwGqixIgNZ1QDLONa+nsXnYIrlSNZ/qs8pjaW7tz77FiYZjdqqJhk054ZV7/C4PoWJL+6JGmcdC8YzJo/O9+DPjp6/vXVye1+1Dt49Yd4fzo5qOHl67rBtf7ryzlsHcnu/gVpTr/epZjxj+E8A42DOwbbALJGB92TKuGo2gIbFPJH6rwaDr1ZAyNYL+5PFAL56WilWcrHtycovKFYyDq5aEe7903ufS1Olo95eNtzbe8yBz/5+AF2ORtlki1K6njQu8n6HZuOPAMFQeF/6SB4FwfA0r58PDJF8hQJBgdzrlqVAdoWCZJ+kKxWqUQ7iL9KwGitCaQg5ETIiNBR1J8dmoW6o2yxyDHWfRQ6Tw/ReX9QnjxzkB1Kah/qRAwASZRa/SSt1vgUnxEBjGKvKTZpyjWTeLjvGV4gFXOJKRpg4vuliVzxmq8cpJJECQbMB+yA13p+IzGgvafG8LoVnTIwOq2JzsiQFNirJbuSopSTvezV75apTjDd7e82LK7YsxVXNXsDJY3dSarJkf9r74bA5D/nJz216cAaN688YtPk7qo+Tu6N+XCEtyaEk2tAjr1YVtmU0Wgw7AeRMKjeh4GCSz30DrXmHyLUUfVQEwb4CX5N2y0TPlcAMEwmYsYlatMr8FqvZx51FWci5+t4s8usX5PuyMmRfuXUrrVUiH44/9/K5B+QSvdnB+3HR7LwixLKyNFM4wWCBJpRvEtu0mWhNo4TSSf9tJsjKkd8wxapl8PT1ojHacy7+HIONGokVEzUbv90Whe01VAdt62ehtuYgmFFHz7WyQxfm9zgx6OqRfofjm7ZcnDIxt/vJwQXjhtyVB1d8886W/KudkkauWtJzi9qs/qaYZiOeS85avazf0GsDRkwkH4IEvau/NcyVe9P5pUBruKhiHjkwB6B5BTs+8zieWSS9EynSDvzRMhzJ
XZwQxcmzjpR6E3IthHoWTpFvE8LZIBHai9P5VWk6fXH6tXS6F8YKmt8Q1YYV2iubVrB8ZoJgB1OpLioxboMujIuvjeOcnMVj11g8aRSTrg3qHJzQwwCK70nlknafr9h14ouPPpkybvzyY/88Pr00MePt8Te+9DYyvr12zZyEtiVVgV1LEv86c/kEqe/0tWYcsch2aNCIt4qK3x44MW9KP2vh4f79+wwm1V9NLz3dM3rJnHXdU7/DU/r3ypSS9xVEL1wNgOFlVlFuaAaR0JT6x8ZmT2k4fWmjCqh1PKP8ExvhdY2+6kczv6XG6RBHUZCQhULu+opcZzzD75gsUeROcnOszhf+S8m/zfxg0eJ7c6Zee+XNOS1W3O12ZuHRZ344cLLbOBxbMPz17bvm529Q7ORX8mJmiXfVK58uWv3Vgmnvrlgz6tVhLbekFrwyuupfT7fudnrX8vOfH2N2rQvsl5+Sy+itUHBCb9WoMeWNPPIwMsDXr80F6/EU4nN7Dhpq/Z+DppoHHdoNX5iFHvpe5oe35KeqIqS/ebdqzph2xEOOoXTulbVpU0V4C4yMDA2xeYmyAI5xNlk85WDJPAIolZkRZUeXyAbwYyS4dG1iXDLfeDm6K+vRXbVuvXDu4zPGZg1PgJtaMz8x3AJbNaNr8Nnc1JRheZ8VThnRbe7Yd+d+umrcoO5zR7/nyUaD23RdthuPHUz2p7Uv2EUJBN6CJmve20jOlJClrrVX16K0czn4SMzdw0dyvH3rfugBDGspl8D9GK5fiD+b8v+eQWB+hEHg5gwCT+65xxAIjFu95Qv9GQSRAAqrIrWCEybq0iiPlInYeBkwy6iYbPwW8538qJSlEu9dpXD43Vj7sJOTpUwcpA9nPa9qO0PQC0scJ5l9Aa+CFy1ixUH0iD86W/UC/ogy/laurAJWzCbDShRHPkZx3pXnAMEmxgGS0/04QHWewAEqK9MyshsB5AyekR0nit5/yXMqxbyrl4HW4hkoHnPacI2FFAn0tlrNDkhX1YsMPh+fn60kjdp0emJZ2TC04hPyLPryK/QeSZLTSSoq9/7Le5ONLw5Arsd37WFiPzIxB4xCuO+G+FlAQn2nREenr4LX+qHxtiMcrOK4e0O7wkswjSlpdGDjkZH8xgrU6LpLPQbkD/BeK8avN8lvgrf7xoSDDADB0F3XmSbqkd4gctC/GxM1SRW+Skbeni3Nzoga2gAmlZSUrVpVJo1pndfa68BvpuWl4c8BwXbSQ/4Hl8/nVYPN/vg6kUfdNosfY7BU1vvyamgYr8O3hPlS1ZzpyImOKSm+IjX5H/s2t04Na9h6iTeJFgS+R5nz3t1llo1hFV3kCZXraNHaenkcW5vXSQ/p73R3j4BsNZRp/39kX/HFs/h300J1tDBOTxwXuSU+9pjDqRsup5BxUlZa6Iyr7xzDuzbRUbvaL83JP9CPSvzGtyuuVv34x2OW4tBz+JeC+a9V3aKyj2Fc9TfGQN6pwgWvq6hBQ37iTKURFYLQ6Vbx39b6lYaJPgeEcX8sQbUJ7oXjSS0uQvTuNIs22IaK3eZkC7PlD8uTFY1kxDsaGQOrStVp28lyVEC2z90rdWYVy6x6uXJ57tjJk946h9+1r0Ph+1DKfmQustEi5mJvVb0weWX4/Wvk0s1v2O6UXf2tEei5i4FmkAzrVENKqi97G1/Bji2E3UkgRgikW73Pxs6lMYj7XC35VWnLBDVMbwx1THnVpr0ygl/xIEKfDCp96uGG5nDyY41b5eT+6qNMuIY+Byt7zocrl15p3e781GtfexONf1x0Ynb3pT8tfi+jzaVF98ivnq0FS7duW7Z4u/zUqHUOHLYUu7eSpTNHj51Ovpmx98KklxdOHT0qF7UggUc/+Mv7R+7cvv3msoj8dUzetwLgBQY7z3ZLPNst0kVFIRH0jhGkU2vI0XbzVlS6vdUAZ6Oko/Lbe07ZVwZ/VJnlY6ArFi6b0TBMhZhYvqNW/Lv+UIoWsSsJfkE7CFKmiElhhTUMiE1hVYxG6rKlJtH7DCZ305AsliW9PeQLclb68cePdhS0TnCUfImao9Gbyde79nwcXnXtpg0NRZ1mGhFG9dMjCkOHkMXk4IAL5PSREqR8GHf3r4Cq/0p64BN0raIgV7VFx9Ah6nIrUXrrJbr9IsGFdxYUM+BB+imynGN4BcvERAhpjFozkZrCiekP195oT8JZV3dvbJ0YFtWhXZd9+/CBba0GOOKf3SdflfZVkl1HLatDxw2X5cLZu07YVwe9+xIAZn0ClWJDGjihIfSnaSG3z5OLq/g3xbpqeKjMfWnOWg7VnwEmHHFPrtxlqcwkk+JwGvX1u2b5Vx4sk5/XIhYr/31TVuYu8ls2OnXtJC/iPX1Vi5F3ozbXRt9A7fZvMr66kLzTev/PMsLIUVPIG4FQDUu1TGZZbxedk1Wzg1ZmB0XNF9v3GGSrz06EVIhRJ5tTrD9r1TcVo8OfvKrpLHNFry3p0nbdtW7UF/2Y/MOza0XBrj0Fy3ZzB3RZwOj55KOkZXsc1AlFSZWUx/qhx3T47l3Q6igNkQYMEdBTDdHtPhY6VItQcVrfHxpGoRE+ox/AToxYEmtnI7ZRQ2vAj9RXTs/ecvAc+vFmN12N5Z+Dl66+cT3E+/IlUuWQxVJLzvlTwuVVUBeyVCOvN4InUBEFP+yRiNcewNfdzqBz1cDvaBxrsfUTA7YFGqC9DU5RwldvLZVryYAdO0bKqw6tlquO61mBr2JX10mAqg+RHmiMnA6h0EgE3gUfQ7BtSNA3NGbv+lbJTL26Usr95L2qplGrWX29/FfJYAAIgGSt5o86RjQtYIw2UkdSkVnAWbdUYbVrND+A6LVs4ska/gzvBEZDmhRrkmTYsG7thp+nyt8H7d0bgkxcHuQv8M9KNQRATG2G81A4ikb0s0FGfMUq6PIy/yvJLrmklCR0Zt1WkltZrAzcG0S+R5YgQPCKfBV/oPwFQiBeDeRWnoN24RLKVANrs5jcEaZKwNc95mHuBH+wg/y4s6hnt859lL/MWb1mduc+vbuwGgP5ezROOUdHV0fFgcxZ9KMI6GgBK3wsgME1lRMwRz6E3Ya+EAg2aKJKdp67krQeyJJvGdUMI8rkD/IA2FLD8OL0KoWPjuscds8dNjwv71geOdyhZYuOHVomtlfmD575h/0vvTQooWP7Fzp1ZquZSPqgN+BpMEFzlYJJvioVwYlTlYcw+5FwU7QpwSRlslQCjfn5Nu3rQIZeTs/t3SI5tPPzQ19clPfUsEFdI+Y0Gzdo6MantWzRHamN8iU4oQ2fCj9Dh8IDogMwnwzvH8wkPVxA+G2196h5dYpsNg7GRGGOO7TJG9742eym9Runz52T6Xo6Kym66TPKvUmLbG1CM1oaJy63pVs6PgUYRsgVUjOlmrNoWjHo4EkpK7br8CZZD6MhNkwjfdJYk8+SkiQXzrxG/rVn8oW765Rqch0lkOsckyET0Z+rD/N8bTKbb9tgkExSjNRCaispmVqnk7aBLQLbBvYNzAqUqeAGoky2y0kmXmbl1CVtKT+mxvd5eXT3Li9
kdev5wuDkzi1auBom/rNzdlaXzpkjOrno3QaJyYC8I+Q7ZI1hBoTxWnYq0IAyueTQL2QamGDMMMqZdEoq0uisoeDTOncqk5w0Xzta7wzUo/OwHsa1G3v3QvKdDUpUb/eEFwe27htM5dz7NNlOrNV/gABfn1GjTsCVGgH3Pq1J+E+agLM8ynZcIK+Q4qAznLkDPd9ryx5bhQuUK9pjC2Hs2LZMXrLklmi2wQoBEKsGBAaJUVEUE8pAnz/EYgZO7EtORWETMqVj2QZr13mrl8wYexkQtJAdqIsBhM/R+3Iq8EaO+r6qBsOG8ZnSUZQtO7ouWLVqwehLgKABuY9awWEIgCjf5/yn5qwrxg+TPKPI/W7z3vjD6DHldJ7j5Jb4OJ1TPOwJYLmlPagDzy09KzvwIgPQx/eGsMf3ogxgUtSA3MSj4We+xi18NWSM6qhQa2B59Ls1qSqVmWXQjcMpDugjeizLJje7Lt3g+eOkm2359UQqtQiWYSeOk64yNJ1mnMN9FvFgUG2eUujtvCxn+LBpU0Zk5kjy4KmTMxsOnpIzBBBMgg04RjoMBparUqjpMyo1XYQZNsAaZUYhvILcQe4VOJ5MRwut6DWePVmPw7T3cbmVjMCtH1tTZGe87wfITe6sRJgQ6TDJs5I8tBIVAqJ6PEWaoMSBBIHsnfyr0tzI+eY4fGncFNYCmq1yKl6Fjys7JJqxA8CrwCpm3/iigY7P2ZhGS7E8i6LDUR8BKRrX5SBF4wQVdGxAAZuoASaYejfm5LDGvvq2I+H2aHuCXcrUUwnrspQNT+frmz+ywMnCgjaGWvpTPflFYGOxgNIZK9nJQamW8ynt3SlvLzY8pH0a0HCyR0b90e2ONdzPTvlL8o/WkD+P5i8BhbEmDam+/vEuiKfrclAH5osOmB97Uux7aQpx+lA1zls+FG6LtuFMNrEGCQzyrJPgk2ObgA1GV1AIlVc28+ax9RMoBkppRKz7vMyDoXCkp981ZhiMGu/k9T3uwIiHXVrtHI9DPjwuhV4YHscubpeSlBLbMMmNUlzK4E/o3zlylrxw5g79O4P6ocLTVdmoVfZdbPsTuUV6zpqFPx0n7V+/Zj1rpcwu9CaWvVVYrqpYs2bN+iNVD7Yw/d1FPVeJrlw0NILtqkuruncxzFqgn+oWsMb7iqJ3ovw5z2JNXpRJJECryqMBkxpr4x5EbIK+dD2qpre7QyTmIl+1i9NX7ULp0i6NOuVM4theTSdehdASGFcy6tZ57suFtgeXrnjQnPLvbIVl5ZUvnCkoWLyQRli6opijJ7H3qlJ65ggykN/JGyuK1q/EVB93V38bwHpHx0MqMKs3WB7Ir5+hh8Z81VzghqbQAlIgHY5C7cLU15ck+jeUEiIAsZ7GZqrHAV6ftDFpSq1gMifTuwLK6+Yy15TDeTame0zmGnEitiiciWyZKYbB+ETJpij28cmMpaY+E+Xrcun7TQMjbWshuSR+4QpLH7Wy57j0pcWyi9XldKY1ZAeU5HYb5cWo/6Sz09eWJXxF/jnjwBKycMWBmeTn+wlHXp9+ZgoatGTbF6hB2iHy0o408quUsaMZ+c0zNKRxdNVXgw2RjVDHTKfTKd1C90iD9efWkyj0ObvQm+wRdK+q/Bz7IzubqBcdzjNv4fr9cnKAVQ4CKCU8LqgHo3WC+m/rRQUoUs8NVsw1sAXoY3o1nPNgSsPZrkAFjFeKupluIoaU03QavaICiMsO7JY9Y3LISQ9a6kFtcl9EHrzjLTn97GnyJuo5bzaqGkmDj4sURD8+82V8wNv73HnOThrJ+xSfBxcsVu085hV1TjRNrkAH103BigcKVhxYJMy0N5wdmVWKpvY7Ojo6IVrK1FGvmH2P5lxJhx9BvxbWAslngSxQU0dv5ARxqR+ZLx/aMWOsbfbsX8kXBpX+BaHIf01YbJs85Y8HDWgeY4vjyHdvxG2NQg1RyNyl+ciAoqO3u66eyF8KMrPWygmqPXUhClzQCI6J3QXFPsfB+kSf2qAR4ghdgjq1AeWjQQNTg5gGUqau9Ri3G/TpSPZ0pCkyJpJNvfbp2ApmaqbGolw1JlasaYjhBObIGle6PifLN+BZkwZsTdkjFvYCvjkwqai10yncBNldTiM9GGKRm64UW69EFEs7dKIdZy7SP1z34Dep374r4XP3J5LlqKPsnYzXZnj3oqH7vZW4+4ASsps1FJNaFI0o+nHh1KLEZkU/o6PJI4qGovuDmMQ0AZB+pSsXAWPFDV/c0uoKeBtilkMbcqnkZxzYVK3cEoclCNB8oI936KKzMlIz62ItudxsN49Noz1S6EEq/7at+Urz9ZafP0TffeH9Hv2Wv9nuPdkcW1v8TB4kSMWKpd/MEvWQ93wIHp+PJg4vORVQAghiqr+XI+gcomCF2BBNBBmsZkUDr2lExXqmghNl6mdVt8LntDhZUwwtoeLXv9lewdQhlM/Qwowgm6cisBOiFLPWmZIF9AbOFGGpkBR6YVXwdqOdXsypFnOKHIFXkV8O9J30I/07U0n/Tl2RpNE3yKWdFvx8jpqzgV7QUFI9XZ2+gV68H2NkQoFDfN31v6HWygnDVahTV9Rz/9o+cTsVay2DuAUAgQkSwt02O/O5HGDmtUMsK2nALNywAHWrcfUDpHhwyWpP4RbskZDxE4+UG0tWkLtHL3+ClBhvMi6PJT99cPECikST464A5hoq8SqUaJgspiLEhKmB1yizNJwiCJzB15jhUHhQNKP06wZs48/a6bMmdmpDxF63gu+jteBjalTbDa6KHDx9jf7hul8jC/ntn9TE9iEH0fObtu8uJJQVTb5D1pKlxfjO91f//AAtRfFvLJ9XjADBblwgfSMxD7yeLk/pYBAc8mM1f8MovrigiHe6GYkGww8MydHFVJpjd6it3FfGmTVR1cMg5sL4rvhgn21dJ88b3nPYO6Ctp/Qe739SF15VA7RePwFs/v9THxSepXosG4WL0v/fDiksQ1u+b9+1k1P3Refnzhr/0Ue4W1kZ7ZQy/HB5682JEyeOKKximV7ez0X6is7HAcN1QGeUWOIu7l/iMC3+rXCNgoNsYCZJqyLXhuZ6iJxTprzUYm7Pyw8eePbtQ2cOjkFNPcoo242JdGx0qH9461jr3xsBINgir0TrDK0gAELoGLVTJgTiTSe2kjwDDK36j8pZsqDXW8AYpfTwg2QHA6ToyE8O/xaSsoIeoZKWYsZdFWmknESKoD0A3ifFPJ4b7vBPotgFbrjNHsa5kGG2x1PE2Zf+99zwxzLDq3/CG+no4iFXHJb46xoaJXwu6+Z1ZD6sgq0gZfozwMFYwwDHIgPcj/qtRsazLMz/CQMcXf03DHDM/HZ8XLI/8osajn/zixr4Mb+oEWzw/0UNKkSxbkQjDrMR9504sZgsNaA528jCT8yo6YI9e8ZiA3Gg2PqAoJBanmAp7om/dyMFexfiuczeSFAit8VTDNNA4h07pold/msgsgxjH+NIYw6DyHhXtSMZuA8eiSWfKWpr1nj6GdAHRgJj8AcIqGEo9QCMeiZVXaOelG90GUVk7+FJQgdP3pu2YH
TXjqOyO3cdPTCpgYsDfIZpx/7SOXtEty7DKcaX2LJBfGJydXXNr/xgA5g5UtQQQP4r589Gwtj/7hdsrsmIcjrYYYuMcnXrxmpoQeh1pviltErr+8ycvuk3baDHiJ6s6ze1dpe2b9e1/u5C/nbl41/QV7c/RRF4YxGeV9sDHG8kErL8lsl6gJPo/7fmgoD+SawHU12YANTREvJtgv8hMpESmD8Wzg52E8dM7EIAjypUbKpp8xoioER1tJ6kYj8bzcDTABTPJQ+EdlF793pQXfkGuS80jZJvFBUV6bqihkNPHSfmkU6R4UGYh3JiX0fOgzIwT0To7FTh4wrxBU/hfaOlvQ9O377NmqeSZg+ktKorUloR6lhSQk4Aqv6R9vuYqrSFSJguNEvQ7eBibw8haEM+DF8FBWXqx2EWFi6A+0yKj3jH3F/0/zV2FeBx3Ep4dN7TnYOGMzc5s8PwHEOYmZMyM1zytYFXZmbm1hSnjD6XufUXfFRmZmau69snjeRZ7WkLHyS2/N9/o9nRrDSSZpRhYA6QvIA8IHW9uUA+/bQ3G8hrr+l8IA9fnerUwQ+25OqHL2bcdVUlhci4ULW0bxaBWWwMq4eYP9lvsl9UFKcMQB/JniA0jYZkfx+6ntBNsD2AeyA30eWEbofNbILFPcAx0Lyb0An4VXAXpHFnOz90lMj4KfFfSp9oY8vYdOsTA/gPaKzeJ65Qn4AIiGt1rFy0H52aJSsoiPYabD+WPef+LNqxTkBkmmgfqnQJ3WwGxMx7A6QdG30kOy8APcCHnkHoJrgiAJ3FTXSE0AnYJNAFaegcTzvuOwJ3KkozUsnu3kz8FMNKhrU0HQCh5Qb6SKgjNF2PSXKFdj8VaJRdo5vcaQHcUa7QLwn0PpEIoRPuGk92QvcRsseU7CprOlrOP7TldLMJtt615WCuc7TKWm3xK1ijRtNBimRZNBh9JHs3AF3uQzcSugk+D0JzE11J6Hb4mE2y0BWm3LyH0AlWIrgL0tA1Qi9jtF4w0zOO1vG6p8Np/JHPTMZQdht9JHuY0HSoIZnnQ9cTugk2BXAXcAPNuwmdgB+80UroIiF7hZYdsw2jNJO1NOcQP6VESPbV0mAe2XBKoGfrkfcigEbT4f7ksEwLrbkPDEAPN9EcNJpD0+EBWGYyf0HY9oRjYUf4sJtJigS0AEBBGnoM+6FjvNQJSbIHfaINfoS+1idGCC3W+z6xD34CPZho/FK075maJXO5iva52oNNRQ+GGUhRM/O1HjeTZuiAbjKOmrHRR7IdA9ClJpoDolGPewdgmcm8mZgTcBHpxkNXCd2M0v5LppQ6JCxHxwXIPutC1+dhJD6sJbkKINRgYI8scX2+S2K5wrpPC6zYl1dY9F3Vrs0cZQr9qEDPDm8idMLdWaAL0tB9GfkulUEQLWaFspj9HEuWPMWu8vqhvlfqpyOk871PJXpQZjD6SLZ3AHqwieaAaHw6hwZgfXJ8Qdj2Ax0LG/dhN5MUCbjGe5KErhAaGaE1glnKUO7ddC+3ktx07zaZg3Lb6CPZzoSmNVQy10RzQDT2cl+bGbVNzJuJOQGXeJITulBIXqYlxzxaKMteWpYSAJ/PIskJvVmjOSR2Ina8ByCxBYK91JyN8K9o/rIGtrIpkJtWlqHfG8bIDz9InmjN6ihizctOwzQWmSMDiLkFfmANFnN/H/MrihnR1wKzuIcLNFbqSi3FSl35UASHBGx10L4h6chXYkUe84lkmPPm7GfkxUpxik/X1co1bqPkx3oLIvoPATXgDUrxT+ib0Mhq7zjQrWerQl8bRY0vWd+LDgddspqtlyW/fk+EbsU85amlmKd8JDTAJX+Wmpz2Ant/GSp+GZqD+6JqJdAZcgr+RsLyoSKNYYZ5tHGUL315rZm46M/Tl6fposbLZl45MBKUzbzMU9A5Oq95pHp2UGJzT1/f6BTnrqvqi0V2UrNjHAVb2C4Q8+/3JOP6zY1ZxXHMzNXoWhozahVK7xDi3oW4m+CZIG5ucHNAbhztkwOYmclcRMyt7K4A5grHlLoLmRW6JEDqShYsdTN8xHa1uMv+QOrmlcxiLtfMWCMNZ9ZDNHMrm2nNkko0s9h7DA/nIaiGeYh+KuOFcK74ufMbmfIrHpdxCvGP/GntvU/H346H1na+Lf+EKcGWitbOp8Xf710a3ycu4vv7Suw7olX+s5e37uC/0bpjDVzGFkCuMRMnT0Jv+QdpRrBmT/JRdBkojljNHCkm5hZ4gs20mAf6mF9BZoU+F5jFXebjdoi7la0LWFvlOubcpAu5FXoSPntrboJVN29NLcXacSVwlOX99Gl0XzbgHOsKtDpsWaxDiFR0NeTLrtfH8xX5XvJeqjGX7g99Nefme+P9+p69jPpzNLzPOwxL0eENgdShmKO+CkbCcWCfEMFXruwErRrwLgIec46SkJ3DcvAE9DBxGXbY08OEMQ32upNjnk3vrFLIYv8N7yoeqU3rU7Wdxr43iX3Gh3PXM6+X+7+W+tGX0j7VpRPaP3Z4PXV69e4OK/u6zExvH9qgktsHrMeb4TY207KZbB48923+J0u3GBrTWIEPvcVw7eO22Z6I1pCYwR6ZFyoftxNY88caH/NoYm6B79mukOtn7ijXowKZcQwt1OhTaAwRd0eNRBN3EXG3spsCpK5xDKlxDC3U6Fqw5R7RK3ePK2sSKm4QfottTLVR3y8nlk1sOOzql1DPcihKgE9shNbrtzTKqdYMRVBwXh6ZLtCLNHoQmw6ZICYfHTHF6D4AEDouMooiFe3uJDbHioJEVJ/dZoHeN/yZWhsguhxCVp8jTKHvF+hT+G/EvcadQp7UO1MU1pI0CfTB4fuRW6ErgfvQhQb6C4GeGSkm7hZ3FZtpcUc0+jmBHhp+GbkVejmAxa3RUJjalR0T7lDcwGHDR5mCozu1lB2KT3Cxat0usbcJvjMjDsnRCoMC4kJ9tc08IN5evwpPimhZESs0EiTLhWIevQArfy3G9iXsW2yvExZ5WqROsI9ST5CdwOo0O11iTMY4sstbB6HxaO3XK7Rb675irSNytCy39rjhMPZytLbIK9AiLxSW2g9H41Ldno3tG2TtQhx5Y3S8rJqNtWKbUT0nktfnx2HccZlGF7KrfJYyGFeoJIusi4jc6jtX43fu0uPKPP3Igu1uN7arOopJLYvEv+h0QZY/FoPM0qru5CFABkTuHM4VP3fGo3KqIP65Nx4dHRWzhLujYsYwOjpVlI7ufDvK1t2/T/SI6MnRjHX3Ph19WwKWRuXkQX5iaXSfqJw8SIpvBJTmDWYfWtmjPZu1BG0clATY3thzP43lcRTxO5L9yOp9HpWi1rTGTuEaW6H3CPA2MU+fsgaj4kZ9PoN6u6DHlbn+FQu212K7kqWeZGlmeazBehMMNP0KB1rvNx/PLEnyKZogsQ7J/ZS7bzgPuNyxMSKC31BEcA18yqZBri8iqGc5tBJ/kFbtaw6m2RZt/QzSWGSOZBFzC8tn4y3mch/zK8iMaGHBzOKO+7gbiHsjWxUQx6yO/iBut5n8LvFvhE8CYgjlmT90DNafwCqGa
B/1+omfErDzUOzZR+g5tI+dFRruB/C9uyR/lraPW3pcWSFRcaMdHIB2sLLHlfn0kQXb3Z+xXclST7I0QxtrsGQZpO3jACHLfzkgC9rHy8ySJIcpLNY8ROYG3csLWaNleUN1LzHrPvZyF41eTr3UqfclOtPkbiTuJrg6iJsb3ByQG2chewQwM82cWiwrNSKzij22AkiO1GxZFUBxYPte7i8S3+MSXun7SNTrPj0u4Wk8BkjeDHey8Zbkw/9A8ua1LF1yiu6OFZJcjU++UX/jwfiNmT2uzP0v2ndV7bAZ28eKnhIee3QJgMSnFoeuNfDHwtfYjvua+DwbteTtAZ6kv5IcKw58wY8F+lZ2Zfg8isyXU6y9HZ5kE6w4fr5jRrm+oIhY+56O9daLMTOK/xUxr4EuikARc0euHOfE/CAxr9mb/A1lz8uRWJJ5ADG3wNdeBIp2d/N9zK8gs0KfD8zijvm4LyXuNraQTbf2HvI5RdoUP9+D+NvgY+hrRf5ijvY39B119B0b2Szc37D2TjqKvO9w+oVd+o6N8A76NCtuiZfL8H5h6nis21kKK8E7GbZD0LqLMjYVysQsnU6uPHnjX4F15KbV7s3mPG1BZRX3PO/063uXUEvzzSqfZVe8N3HdvmrZtN9KZt1BFdGzj5wJdK7wT9ItxcUv8az05eMf3PrTacfFBn9WDta4yfHfwy5L61Da1dTsjOe8NeFNxv1UWgJenDjIV7bCdVVlURyjE/WscjOrT5/z074X1qBA77KHRleSz6XcNMmBTKFxzwu5Jys0XBa058WN+DEHih83VREzxY9jJjPvJuYEdJF9evOlLIfsU1XjxDfoFP22OJtkodUSzbCwbgO+W/bW6LKAmH0/fLdobv4LcbeyIwK4sx2Tuwu5FTozgDubGdyReuJuhptZg8U9kBvcHJAbvf90ZjHrp6NyAeKe96mqj6HtdpSI9kcx8xiO77M0+jhAbtPkk9O0RjBLXuQkgT5d6+9Tdoov6ie5R2huzOyE2j5XoxusnR16k2uLHUcWOys0IsBiY1HDYpF7D4Vm5wfMhQbY3LqXjwTMs/Jsbo0uDhoNJjfvJu4EzvEL0uQu9vaMNf9m4k/gfmSBT3YcEx2D/mCXeRb8GrCO6IPyW/s7An0B2GMuO9NbUU41VpTN7nz3VXtnyovk8hUoyVitm2tZvbUWztaSYDU1lGS5Rt9pr2goar5DapXcg6FzLDewkwF3clKr5K4G7Q7fAFsBtZJqdx5B/GRsv8l5BAD7H5Z1YrD/2B7ewT2AtPgwafFG5wE2x9JipqlFfgayKPQCyLK0mOXzieXE3Q4XsQmWT+znmE/oC/KJ7WWOD0saV5VCnTu4tI9yOBk6YkYO6T+vATQwJk/1yX9yM2I62U6W7xScw/tjGcj+HP+MlxW474Bf/7Qq7xW95UPrsL4XlmOozatlXnUv545HVSVRWVQ09SuLPPTo76t7i4o6z3WPwnKiA2RxUcbFObnfb9GVRdXc+r/YV4z8Qw1sZxtCc1kEZkKreyBEoXP0YB3BzwFwRuOzH4bPeLt7eupktKGlPhvawE7QNrTUZ0MbYBO235razZmD+KEaPwH6yEiowH+P+Pm6nQP8H+dLiG0AeAFVyIlBAzEUA1EjafSd9F8ApbIGcr3Zw/Ja6+t6vm/3rCXJZSo7SApPEpDdC7SinPG3dkFRYg6DhDaArzJJLFdQ1LOZGNtEcjIz2RQ2QAUqt626tEoiK/ZSR5J9xMzc9zDQItDftdSC+w9Alz7xTheekvJReeozPUxQQQjjcqJ/+cSLT+XVHgI57X3miegMwgkKrPUDInsISgAAAAEAAAACAADiktOWXw889QAbCAAAAAAAxPARLgAAAADQ206a+hv91QkwCHMAAAAJAAIAAAAAAAB4AWNgZGBgz/nHw8DA6flL+p8XpwFQBAUwzgEAcBwFBXgBjZQDsCXJEoa/qsrq897atu2xbdu2bXum79iztm3btm3bu72ZEbcjTow74o+vXZWZf2ZI6U3p4f4Ck9+V8/0S5ss3jJOpDI1vM0D+oI/rQz9/N3P84xwTRnKQLKCpW87BvgxH+wNZGhqzh74/SnWlqouqq6qMar1qtqqJariqt/ueue4GjpfdqS+9WSunMDc8RqPCqQyM5fXff3FFLMO4WI0rJFUN1utRTIw3c4U/mdtkIGWi6P2mXJH8rc9uVk1nbNwJ4xDd++VyH83lUU6Pp5HGfTmosD9VolBBnmVXeZK2/lCWh/ocp/x/aE/1cDbiJ+jzjvr9FFI5jc4yi25ShS7+MSrrve7Sn9T9QIn7IrtPdlH+wNmFwCIZqO8vpZPYdynd/C3Kw5Tn8H8ZwPzwPocngRPDbxwfnmAfZXt9p7r7ieuUe8YRzNLzRdJdc30pneLNytc51H3FCvmcjrq/vkkDOoUVrAgP0FeGMi1pqPevZLz/h5lSlx7+O2qqqvqZTJL5rA9fUMvvwwqt6Wi9PzFcpLqfvlrPNkkZmicVGKZ7qV2YmP0otelg+ZM7uVQeZFHyAE3leqbKMurpvzrJ2ayK6znY/ckGGcV6acYR/niOiIu4UJ8vK1xA/0Jteri/OT/O03zdkX0cp9JHlmssS0nlJ+b7kN0cHuaKUEIaBjLD8uivYYI/gTPCo0zyf9PVd2Qq/NPVffdP+VidC5NqLHXr6K46za3hKP8y/f1bVPYP6PmNLPR9GazqoLFV0hjLWu6SNhyaLOWy/43l8kIvKiQnkspUusU3OVSO4AQZzWGxPl1iM71ezuU+aJ2H6vkiKrt/OM9ylefS/hlWs0RrdK71hnk9dlGpZC6Yv/w52c/m2S1KfWweLpY/OXtffXy98gvVq7l/N5Z5t1jmXfPnFmWeVb8Wy/2ZPap1W618TnV37tWNZT4tlvnUZDHYvzemxWXrbZHau3F/ulm8to9t0frbemyL1BxZ/2m+btM4zlHeqjxb+bXyRc3nfu6H7C/llckabgtvUmJzwnxns8L6VZpygfpuhfIKZTujn8fZYnyGs20Ny8/GlIHZ3VYPy9PGtFlj/V7KVqXsZfPHZsA2aR6yOVHMR/i/1dvqsL20+WYzxjxidcvnnM2ajWk9bz1uMVh/599uzPxflkObszbr8vrnzzbhBRqTaTB75O/mNf4PGySVPAB4ATzBAxBbWQAAwNi2bfw4ebyr7UFt27ZtY1Dbtm3btu1Rd1ksVsN/J7O2sAF7GQdxTnIecBVcwG3NncBdzT3IfcT9ySvH68E7zCf8/vzbgv8ErQW3haWEtYUdhOOFm4QXRRnRJbFe3EV8RCKXVJQMljyXxqVlpL2lZ6QfZMVk/WTn5Q75YPltRTlFF8UmxSMlVk5Q7lF+UdlUGVUNVX/VLNU2dVo9QX1fU1SzRPNN20W7VftWR3VTdKv1Fn1T/XqD0dDDsNHoNHY0bjE+MeVNfU37TN/M2FzNPMl81SKztLBcs1LrHOt2WwPbeHvOPt++2n7CMcQxy3HJaXa2dD5w8VwVXT1dM1zn3Xx3ZXdtd1f3ePdSj8TT1rPcG/D28j7zLfEb/S38VwMgMC2w
NsgOlg+OCF4NZUObw1XDg8KPI5UiW6KmaOvogei7mCtWItY+Ni52OPY9/n+8U3xN/H78NyNmtEyBqc30ZUYyU5mTzJuELBFOkESVxJVk1xQvpUqdSWfSqzMVMquyweyA7LMcPxfKTcjdy/3IB/Pd8g8LwQItzPt7GVCBbuAiNMLecBJcCvfAy/ANEiM9ciOAKqNmqD+ahlaiA+gm+oCl2IMhroJb4gF4Ol6FD+Nb+COREQ8BpCppRbqRQWQmWUMOkdvkI5VSD8W0Kv1TEDzACAEFAADNNWTbtvltZHPItm3btm3btn22hjPeGwbmgs3gJHgEfoIEmA9Whq1gJzgUzoab4ElUAB1CN9EHFI4ycQlcH3PcB4/HB/B1/BaH4HRSjNQlG2lJ2oBy2peOp8voXnqFvqbfaRzLy0qzRkyxAWwyW8UOsjPsOnvHfrEwlslL8Cq8ARe8Hx/GJ/Hl/A5/wb/waJFLFBLlRFNhRG8xTiwRu8Ul8VqEiHRZTFaS9SSTveU4uVTukZfkPflKfpNBMlUVVuVVbdVcEdVLDVIz1Xp1TN1Rn1WUzq0r6Ja6kz5tipo6hpheZoxZavaYy+aVCTQptpCtaaHtbkfZhXaHPW+f2f82xRV2tRxyPdxoN90tduvdbnfJvXQBLsmP8Qv9Wr/TH/UX/d0sCRMZsgAAAAABAAABnACPABYAVAAFAAEAAAAAAA4AAAIAAhQABgABeAFdjjN7AwAYhN/a3evuZTAlW2x7im3+/VyM5zPvgCtynHFyfsMJ97DOT3lUtcrP9vrne/kF3zyv80teca3zRxIUidGT7zGWxahQY0KbAkNSVORHNDTp8omRX/4lBok8VtRbZuaDLz9Hf+qMJX0s/ElmS/nVpC8raVpR1WNITdM2DfUqdBlRkf0RwIsdJyHi8j8rFnNKFSE1AAAAeAFjYGYAg/9ZDCkMWAAAKh8B0QB4AdvAo72BQZthEyMfkzbjJn5GILmd38pAVVqAgUObYTujh7WeogiQuZ0pwsNCA8xiDnI2URUDsVjifG20JUEsVjMdJUl+EIutMNbNSBrEYp9YHmOlDGJx1KUHWEqBWJwhrmZq4iAWV1mCt5ksiMXdnOIHUcdzc1NXsg2IxSsiyMvJBmLx2RipywiCHLNJgIsd6FgF19pMCZdNBkKMxZs2iACJABHGkk0NIKJAhLF0E78MUCxfhrEUAOkaMm8AAAA=) format('woff'); -} - -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: bold; - src: - local('Roboto Medium'), - url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEbcABAAAAAAfQwAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHUE9TAAABbAAABOQAAAv2MtQEeUdTVUIAAAZQAAAAQQAAAFCyIrRQT1MvMgAABpQAAABXAAAAYLorAUBjbWFwAAAG7AAAAI8AAADEj/6wZGN2dCAAAAd8AAAAMAAAADAX3wLxZnBnbQAAB6wAAAE/AAABvC/mTqtnYXNwAAAI7AAAAAwAAAAMAAgAE2dseWYAAAj4AAA2eQAAYlxNsqlBaGVhZAAAP3QAAAA0AAAANve2KKdoaGVhAAA/qAAAAB8AAAAkDRcHFmhtdHgAAD/IAAACPAAAA3CPSUvWbG9jYQAAQgQAAAG6AAABusPVqwRtYXhwAABDwAAAACAAAAAgAwkC3m5hbWUAAEPgAAAAtAAAAU4XNjG1cG9zdAAARJQAAAF3AAACF7VLITZwcmVwAABGDAAAAM8AAAEuQJ9pDngBpJUDrCVbE0ZX9znX1ti2bdu2bU/w89nm1di2bdu2jXjqfWO7V1ajUru2Otk4QCD5qIRbqUqtRoT2aj+oDynwApjhwNN34fbsPKAPobrrDjggvbggAz21cOiHFyjoKeIpwkH3sHvRve4pxWVnojPdve7MdZY7e53zrq+bzL3r5nDzuTXcfm6iJ587Wa5U/lMuekp5hHv9Ge568okijyiFQ0F8CCSITGQhK9nITh7yUkDxQhSmKMUpQSlKU4bq1KExzWlBK9rwCZ/yGZ/zBV/yNd/wLd/xM7/yG7/zB3+SyFKWs4GNbGYLh/BSnBhKkI5SJCVR5iXs3j4iZGqZyX6nKNFUsq1UsSNUldVkDdnADtNIz8Z2mmZ2geZ2llbyE7X5VH4mP5dfyC/lCNUYKUfJ0XKMHCvHq8YEOVFOkpPlLNWeLefIuXKeXKg+FsnFcolcqr6Wy1XK36SxbpUOLWzxg/tsXJoSxlcWgw9FlVPcTlLCLlHKtpAovYruU/SyIptJlH6ay0K13Upva8e/rYNal2OcjWGB/Y2XYGIoR6SyjtOOaBQhXJEQRS4qEvag51P4ktuuUEzGyjgZLxNkAD4kI1AGk1Ets6lVSjaQjI1ys9wig6iicVaV1WQN2UiOlxPkRDlJTparpIfqRNGUGFpIH8IsgQiZWm6SW6VGpMxiMlbGyXiZID1ksBk0tasa+REcgrWbjua9k1ACbC+aMyG2RGONorqd1Ey3KvsMmr9WKUGrtEHZP2iV5miVZrPN5uFQXa21FgShu/bK9V7HCz4/+M4nBcnA9ltfW25z7ZKNs3G89bp3io+47JSdtbHvkX+Ct+dcfK7+Bdtpf+h+/o1trsvLQPQzsat2+pW5F3jvS5U0lhdi522PtbA9L6zn5efGkM/y3LsGAHbD/g22Tyv213N1GtoduwmSRzWG2go7BIS/cix/ameH20SbZFOJQFgyAFto4y3STgLhds2m2LIn+dtsB9i2JxWyA9hJ9fuNXeLF+uvtiB0DCWES6wxgl+WMN6zPWQDCnu6j/sUmGs+LuV1spo2wdRZrE4gkiiiLfNTvJRtgJ9RHpMZ/WqP4FIBQVAv5Qp3L2hFe3GM7/qa/5BWxg2/Iv/NsW7UG7Bzvdb0p326+Inb0PesfeLf56q+7BkDEK/LaAQBJXldHI9X96Q6+dVSX3m8mGhvy7ZdDbXSCE0YEqcn86BTP/eQUL0oxdIZTEp3iVKIyVahGTepRnwY0RCc6LWlF61ee4rHEEU8CiYxgJKMYzRjGMp4JTGQSk5nJLGYzh7nMYynLHp34m9CZz1YO4ZKfMOEQIRxSC4fMwiWL8JBVeMkmfMgtfMkj/Mgr/CkgvBQUARQVgRQTvhQXQZQQwZQUIZQSoZQWYVQS4VQWEVQRkVQTUdQU0WjmujcQMTQUETQWSWguktJSJKOVSEprkZyvhYdv+A4ffhZefuVP3WPRaUeiCGUEYwlnvIhkApOJYqaIZhbziGGpSMoyEcFykZRNwmGrcDgkfHDkP4WQhQ3EQBDE9pmZ+m/pK4ovGh2DLW8Y/0wRrZ3sTlWy/Ut6kPnlj7St3vzVJ3/zxZ878t9iVrSeNZdng1ty+3Z0tRvzw/zamDuNWXr9V2Q8vEZPedSbe/UNmH3D1uu4Sr5k7uHPvuMCT
5oZE7a0fYJ4AWNgZGBg4GKQY9BhYHRx8wlh4GBgYQCC///BMow5memJQDEGCA8oxwKmOYBYCESDxa4xMDH4MDACoScANIcG1QAAAHgBY2BmWcj4hYGVgYF1FqsxAwOjPIRmvsiQxsTAwADEUPCAgel9AINCNJCpAOK75+enAyne/385kv5eZWDgSGLSVmBgnO/PyMDAYsW6gUEBCJkA3C8QGAB4AWNgYGACYmYgFgGSjGCahWEDkNZgUACyOBh4GeoYTjCcZPjPaMgYzHSM6RbTHQURBSkFOQUlBSsFF4UShTVKQv//A3XwAnUsAKo8BVQZBFUprCChIANUaYlQ+f/r/8f/DzEI/T/4f8L/gr///r7+++rBlgcbH2x4sPbB9Ad9D+IfaNw7DHQLkQAAN6c0ewAAKgDDAJIAmACHAGgAjACqAAAAFf5gABUEOgAVBbAAFQSNABADIQALBhgAFQAAAAB4AV2OBc4bMRCF7f4UlCoohmyFE1sRQ0WB3ZTbcDxlJlEPUOaGzvJWuBHmODlEaaFsGJ5PD0ydR7RnHM5X5PLv7/Eu40R3bt7Q4EoI+7EFfkvjkAKvSY0dJbrYKXYHJk9iJmZn781EVzy6fQ+7xcB7jfszagiwoXns2ZGRaFLqd3if6JTGro/ZDTAz8gBPAkDgg1Ljq8aeOi+wU+qZvsErK4WmRSkphY1Nz2BjpSSRxv5vjZ5//vh4qPZAYb+mEQkJQ4NmCoxmszDLS7yazVKzPP3ON//mLmf/F5p/F7BTtF3+qhd0XuVlyi/kZV56CsnSiKrzQ2N7EiVpxBSO2hpxhWOeSyinzD+J2dCsm2yX3XUj7NPIrNnRne1TSiHvwcUn9zD7XSMPkVRofnIFu2KcY8xKrdmxna1F+gexEIitAAABAAIACAAC//8AD3gBfFcFfBu5sx5pyWkuyW5iO0md15yzzboUqilQZmZmTCllZpcZjvnKTGs3x8x851duj5mZIcob2fGL3T/499uJZyWP5ht9+kYBCncDkB2SCQIoUAImdB5m0iJHkKa2GR5xRHRECzqy2aD5sCuOd4aHiEy19DKTFBWXEF1za7rXTXb8jB/ytfDCX/2+AsC4HcRUOkRuCCIkQUE0roChBGtdXAs6Fu4IqkljoU0ljDEVDBo1WZVzLpE2aCTlT3oD+xYNj90KQLwTc3ZALmyMxk7BcCmYcz0AzDmUnBLJNLmoum1y32Q6OqTQZP5CKQqKAl/UecXxy3CThM1kNWipf4OumRo2U1RTDZupqpkeNi2qmRs2bWFTUc2csGkPm0Q1s8MmVU0HT1oX9Azd64w8bsHNH5seedBm6PTEh72O9PqcSOU/E63PkT4f9DnaJ/xd+bt/9zqy+MPyD8ndrJLcfT8p20P2snH82cNeup9V0lJSBvghMLm2QDTke6AFTIsiTkKQSTHEeejkccTZeUkcYLYaFEg9nCTVvCHMrcptMCNuKI/j4tbFbbBZ/RCC8hguw/B6fH6v22a323SPoefJNqs9Ex2rrNh0r2H4/W6r3d3SJ7hnrz1//tVTe08889OcCZWVM7adf/Pcg3vOfi7Sb7ZNnb2MrBg8p7Dba2cOX7Jee6fhjy+tvHnmqCFVJb1ePn3qzYznns1497K0c1kVAEgwqfZraYv0AqSAA5qCHypgEZilRWZ5UT2PYsgNdAxLlEcNYjwKajQGgw8Es+JcAwHH5qETLIgby1WDHhpXgAyPz93SbkOsep7hjeL0eqNVIP9lTHKRzEmHdu0+dGjn7sPHunfq0LV7h47daMbhnXWvenbo0ql7x47dmLCSvrRSvDNw6uSa3oETJwLthg9r37v9iBHt/3lj9amTgT5rTpwMtBsxtGOfdiNGtPujmzivGwjQpvZr8WesjxPZUAYhMK1F/0qJXHRyLXWOAx0H50dxboQfxapphKtHGVUGHf1gc6PC6GkIo0NCsYGDIdUo5n9yHFb8Uz0qpyqHT8qpyOmZI4w2c1RTC1d7tc4anqdBGhkdmshNVo7GA2MF8+opFMrXcvAt55yfJNbVj8SKVhCJpBCfz+vGL5mK0yVjQRtLLX1+osicbALyzY/jkdK22by5e7c3z+x5acqYSaSkScEL3Xs8T9l3/Qc8NvUqY+SjNsv87OFG3YpXpZYUzytzDe7coy/ZsiQ4Yuzd/U688NSmCXd17sZub3v7oC2fjfhCGltW8VnjxjpZZy+dWjwpIJwormzTK79/iW/wBAAgqGEiyZKzQISGiQpWr1h4SISYUkm57FNqBQIBVkr3y8NAQ+3D36A4IWQV/JmZqJw2NT1T0Q3QAqTsQblg41NPbiqQH2Iv035kK206mGysZG3YMSs7xtrMDAyhTcjWSC4axqy4LiZRQdFdvnTNq1KX320HjVawZx6SCzc8/UKgUH6QtKPt2PKac4MDleRlMsxKBpFXpq4ZVBNmKyIxHbSvMAF1NBWyAQPW6z3nEIpfMhe2fL8kuIX8TClDEQQX6cwueUmTlNNpRPey/31uR/D0LuH14ccWkqFs//wTw9hv00gu+7IyEr8T3Cw2Ex+EZHAAktOEiPrIJO5s8hWcNqema06vU3PT02QFW/8NW0tWfSM432N9SfA9chuP5WOfkxnwHUgggyki+HwUXGw8M+65u8v3uexl0v7FyJpdaRIdRN8AAdJ5nYKQIGi4CB1U8zNNoUnPR3X1LjTb4EsQYnsMWACwJO6xk7e4bT/99GX0N7R2ndAo0jMzAOfHN02cnKkT94fv09bvr5QLAD8UpuJ51ev0rCK6SgOc3gCn19OKL9lADWokUbkS0ldBzwNNU8HdEjRXVGu0qPKIei288y5jBN59h9Cfl8yfv3jp/PmLaAn7hF0izUgO6U0cpAW7wD7NP3vy5Fk2o/rUyQeieM4C0DcRjwS+aHYSJiRhdokFkVRTjNUkvr1gffj25dM3f2ZXqEN85awnGncAgOhB3A1hQDSuhqG06+MGs+MEg0I21x4BImqiqcGk+kF0sY1xoc8M45pOL4mpgk13GVCnJSTTKXr+KSPXFgybNz6w4msqEctn537ZcSt7XKC7j1Bp9YE+E9bvXiU/S5K+eGzlJwfYcRkI9MM9smOuzWDV/+9pGmaYlnq9hLYFMjf0Fje13Izl5ntACdyDxkxTg0pcymnYlcImJDTWkK0ZcHQO3nrRBvWETcbdrEfVuA6VHa2IuhjrtnyGTjYeWzR1zsyJK7+iMpFevcjmTVuxkH176VX2rUy/Wls1d+3ilceELgtnTJs/d5R85OMrL40+Xdyiev7Ln15+Uh6/ZNmc5Qsj/CwFEIfj/jeANOgFJknoJonXwOrVZBeho02iBmkcTDlsEq4XIUsyjQo+3p84FpvOj7aLuIlTcynCvocf/qlml0xn/1WziWySrVR5nj1BOt4mXPlnKO1Lm0d5sxb3wsB8cmFylDcEVyexVFLRSeV8JAmXnJAllfClLUX8xpYRRhu0x6VoUYM5CS4WP7Qol4xGbc5ACRJ8Pr8v3WalWOW2FIsc2wbl3kECqXmlRfO5Xd/44pfPn2a/S/TjFRPnLl42d9J4
O90m5J9jt9zYlFL2x6eX2A/nn5Us0xftWbf+UPvWQGEBYukSOQMu6B+nMDE0VnSsHA0kECeUCrz7ItigIy5ra0J7xQK3tGcqRoQsNh92U8w/JhEZmLktBoMe7bO7rLB0epebg632jH3uY/bP+ffYx6T9mVGBvNsWTF8WkF5wOh7Pcnz4lOJvxb4//z77iJSSLGJH3RhW06N96dRHXn5ww7qD0f3pDCC6cX9ugKIoomQEkXw9VczkxNMLnBCUCoruT0/3oxKL7r/NJmk/p7m+evWfGuE78Vt2lRns9N13kx40+4fnAD8CjMf6NcP6ZYKOq42NrmfDJWy4Xj1P+cEsSLLxkhUklCwkOAq4oqQVOOpuIs64nGxq0JVQz7ij5o27pAixmy+WM/67KC2ZsngH++XyNfbLtqVTF/36ykt/vrFletWG9bNnbDTmjRwzc/aYUbPF4lnHCwofXvLa5cuvLXm4qMWx2c+eP//PkRkbN1TNWrWa/j1u+eJJExcvjpzFAYg3s44vfRL+t0nkS3xjCynWFA5OSSRLynVkyecXVH67ol5PpINovJ8YLr/dnoHXLW8MFxXW7i3ZMSj8I0l96SOSyi5/3XNvxxtbB5aMDNy4dsmE9UtPPfNIx46difLpNfI/7DL7kp1g37C3GjV6NCeL/NStbO2ps2c2bD4CALW10f4qDgYDNPymcCtU8R4uYw/H8WnY1+/HcReOEKGKyJDmBj5OcRwItIUhwnqhFpJw9xFg6CkFlTYXTfVqZdf/tfIcAE0d79/dG2EECYYQQBQCAgoialiVLVpbFypuAUXFWRzUvVBcrQv3nv11zxCpv9pqh6DW0Up3ta4uW6uWCra1So7/3b3wfBfR//rVcsl7+ZL73nffffs7HTFBR5D3WpvCDmUdIQb1I01myQTjoQl2MRpRl/r3hG4oVpCF83Vw+kdwei2j93o4WagRrjD/Nw7YgU6IrsgAfQGRcYCTLxUZur5kPuL/lYuuNgU1XoSa+ueEfPon+J1yrD1J7UCC+5VG3BHBHVHcEcUdlSGKO3nPyzABMdyNFOv48MTEyEXCyPp9KK85NAqGGrz6I7y65gckiwz3dgAI+xivtAIDOA3LqyxbS9V3By2ZYgWxj1KxdrMPUEhIZKJWxzrtdWqXG6lJNABmTO6TO6EgZ/pvgvDn0c+vb5z6WEvxzh24q2xeXq9VAwomDR8q2098/X7JuWGdhg3GY64xvHvgZPkLaR2wgixCI1vHWKJpbdGx3G7mDCO77O7d6Eeg+9T6IJEoXP9qW0dDeSvNbVsrcjvaUN5aC9pa0c2ZWrhMKvyhjOgmkGUyEsFkpRLVKsh0dyc2B5YQICBgIe/NBCIEGNktqHxMBISRCV+50v3qzz2L/GNX5i4ra+5/7cXJK/oKktUtLnpWmZsBf4zfwZ/i9d7NYU+YMLgiIyLr7Gi8AA/zaQ6/hPNgCdx2D3ukdEseEwlhjDkuaOZ8eO9b/PGA3n2za6oggAlxCaLjSGGvi6/CKXAHfhxvwhtxbhtLaVQsrIM2+DLywL6O+mUrO6a7GfRIcPf8hNHZAIBE7VQd8ASDAWfec3ESdiGTC5nSGsiiwiLUtMnjuEOk1kzFcI9JHoR5kz0Y+SwCsXdhGH0VKhzHp/+FzFeRz9+O7fCtL2Q4AL8u2e72RcFosiLP9wIgHmY+hxmEgGJg84/lVDxnGtpH+FMziw5T/GGx/Sx9V+NPbS1/uvSGcm/t5vGnTEK3rUG9y6yEYO1+tfpYOon3TSpILhmHhztfw/bCn2qhobiwdDW+fQN/CjstfKZ4Dj4A9dOWrFx2S7KdOD56V0TLD0s++Qptwe2eLpq+6O1Jo56aACCYSGT3GbIfW4Kuj9KLgIabbN50LDdy1C0P5CSL2U+190OAThfGG/zHkIjP1Tfgj2ByPUSwrYiu7925+a0D27bugj/KF/F1OBh6QhP0gEPxrZ/ljc/fsONrFTee28R4g67DL2Qd3IERJIOHLwGln4cGSUJdTxdyhgDi1AKL4NMYAdkLvyXzDscv4Os/X3r77Nm3JRt+Ef9xEdfgl8Wb97668d7lQzcAZDjMIDh4glxAaHWfDV1JZj/rSS1tOuz1hHmUcIAjHG+MklgeL6F9LCbnn+jtWIJ+rI8SzjpaowWoDFuPSrZKXAiAE5+ZjCY9wHwiifwfvmXsI9wJMhnuBBn3B5CRXWYPc85tcJTWCd84gtBCVOTYSOfNYvNOJnxzgfBNCMgDJG7zSAeR2NXUTWzOuYmcC5VObFq7NxloMKYVZwDIYliIk59EGoTQ8FMi1WHihc7472r8D34dZmIIYUsBXXXbuXHroZP7iteG4MvI91jOCtgbusEO5K+347Q8e+MPb+JPbT/Gt4ZtDjppKBnYmi4D3IJyT8WxGL/UbqKsmPH2vW7kQdLd4LSKMre9bogIAvLe7u0GiyvOul0mNypGuE2h989SwFg6lJAPH3RNyQJYyWiVDLWO6XV1aHWtQn/HIrSI4vwGGfYxf74lFwHn0WS/ZYX76uoIKFu35IbrwlVyYQCxLpa96kTTx3OvJq5zuRfv5Pnw7hyqq8P1Z75rABK6Pm/yyAWS7d6fZ34//7k8f/ry4ka6xjKbeygnyTXR9CbFOhNBTIUiJtZlQleZiHWo4RgPKCvqPoxRivhqEFpQ55fr6lbBkzDE8TtKxt+gmY6VhGRb0QTHkw6dul8oThJo+wjtwodgwulWsMINaHf91LqjZPMpvyPTOJQPmKOhI8f8PFG13EQvVGfduUdgdUUc7AqJkgqDxNrKgaMhs+eobTNFT+700efrUV5FO30KebG5Uc8EWtlONUbCMKgzknfwPPyXDJ+HyXX+Mu77L9xf9q8jy7JPHHm3L/wDzYL3tomF0LEaU3YHPO9P/D/xPpFcNlR9sDfKQ0VIyDvYAkWjZCRQzAmOFb5urd0QeRq30fSlk1sX8kKZEurossFEhcHnyoTDl8u1YiS69x3B9zwSWwMExpGYerP/TAzKwmQIe+FjUFIzXI7/xHfxIdgdStAT9q2tfHHfu+/uf+kjNJB8sB+OIDdl6AFH4n34L3Twt98O4jvvXP/tEFB10nkWhzCCLoBffFVBMRMFCoqJUu7Jo9qcQ5WQhel6UVXuFrihDj12C/rgmlv4Xfj4imeeWYHfRW0c30q2f05/8nfluilTqH6k9PKT+hJ6GYEFpCu4GMj0BlevUyth7YJ7K4qXwVBu5hBhkW1IDMiHUy53QO1z+HbC7IyHkG/FrwOur4fAz/Q/oGEDoWEgCAODHkFDdtGcXDTnCMq5zh4tAL0r8H4kpavGhqLpIBNRJVTz83QOvA09Zkyd91RIxN025kVT8WEYuGH50hX4HMp1PC/ZLpyZ9q+OkeWL52TMDTFb1nadMXVp5dSnJy9Q9tJwohNfko6pURM+HNWSXLSkiJtbsnyG2TXfxfFwS0N5+AN5LeLfk+CaalbRx3ANsgkVK167jf+BYVf/gGESurZtzbKynQeu38YXb/6EX5bQb+9sXLEFzhw+vX3GF6/ZfsL4bXnqqum5OZM7pl96/eA
3tz6Xly0pAhAEAyCWMjs8lpcL/M4jdosEtVlJxXhgirkUP1GHnxBHE/PJKN6sVGi0nNDoFpObCZzc5HQCL2Jc1JAPCxfF+1idfOgj3sJVDXfxqbrX12+xS7b6DrXYAcVbQnV9h+07dmwXqum83gBIErOT0h6ti1Svgj5NhjuVyQPgGCjm2X0hcx7M1kRooc4DKgqUA2AuFBx3fnH8AwW4oHC0GH+3L9MPbQCQf2TPuZTjaH4+bo9y+oEPGxL9IFfbfYkSzHAPk61ylpwjE4wKyA1qmgtMS6QQLWHPpkMRHYZTpdFCH61HFGtTIrRCc6KRuj30nxUBCMOOwggIr9bgFy/iizK+cAm/VAOXIklse+9LnYfY9m5f0XTvOnueTgCIvzM9MZCzvDVYu64bu9CRCx3brjqoeDokgUJH8jwTKfoEd3emyyzq/2glwTUEZ8DP8AVcRf5dgafIVSthCwp0tHeEojDHRXQJfU7X1YvgdY3g5QZ6cnhpZn/AMhdEigqdGRClC7oCqqHAaIAYNrITG6pOLWguHAm9sa4We0NvdANV1WdjiPTC83TuIWTuaYynHgfcdA+1JewiQCzqxW0bu7vEwj/M0IinwRkTnIPu3PsFfeeIFu4ePbpNHFi5Qdk/S/FhFCSvBTrQmuaUyJS8Jc8JFaXYgdrxKOiFF/B4uE2q/ueVI7rPld8ykZxQQWNOCMVqtyP5KmUV0w008gZRM18weD0Rhy865yaANFUl8m6WjsuY0hgTKbXQ00qBl16S195pf0QeDCCIR+eEeMWP421XpZaC+eZCZJgOCp/C6Ndg1Ccv6GU9Ooe+cbSFuxMSGC5CQ6awjXnnQZr99YDpJtEo17b6ScLmDz5g3+srHkZm6TgQWX5HiRfY3yJDRTCIBYg47TQ3EguI536ZvstWkibUTqdDOh28yXA/rXTQWwwWY0Uhj6GeaEHmKuxAUC8ehqKsxkeh2AeEgGiwWcE2gGAboOcEjmscwUumaSUSSa34wOusF7ELa7zgtAz3Eq8yr71eb3mJxRXZXiO8iEdB7xAOrvFq8ELFtgBOj9h9A2RmQvMxZC8X7WKJUKJJLHRs5YNnVN+bw2mwVVE5gqeXj9DpX4WvvH3n+yNj8nJG/QZ1dZVHfm3u67iSu9H/o4mz+7XtE9lr3Jvbdr81YuDIvunyouMfVuDgrHnJb+Ym75vQPe1JgMAiQpME2R/4gGAwUKMtfbWiT8+rG16i0GSJiTelgngLhgXJdNQ9YHkGH0Vr6nz8lGBEwsWThZs7+Z+p67Q67/TFuukL+xWFBE/OWVgM/7mJL/fPXi37O17q1oPIn/pXqp/IwJ0zu5dvpTzUj/hQf4p91JiJYsfrtbKdZ0SWuhGqaWbNl47lZtcYt9XsR7Q4IgYJjeapCp5GttOHzr2AJNzwdk1DQ01lnYguzsh/trj4jQnZ8rYLMO5G2HUY/+Nb8tD5J7aEbT9G+S2H0FbgacuI5qslp57XMbyF+N/R1mhgQUdaSBWpROetTo9c8c9zLp0csspad8Y/bkPBiUt1Ty/oPSk09Kke82eiZlCAqd27oJx/fl3eKxuG3thi75IKv03J+uxltleGEtreEbOBH8E9T4O73nV7BAEdZeygWHtZEPGuS4LKSMkHZ1u7BNV0LmSXQgEhNzCTBJTJoqM8wQKmAuEQs4Xmn/pexTXQ+8x31xx5SF41b9TqzD6pp/YPm94MwTcmmGDMjTY3YCLEf18ukxY/3yFmb0IPYV/ZZClgXCmAIAoAdF6OAWYwABCWeJDuRnJhdH0qSmjIJwC9ubggrebyI0KSVbDRzapJptHE5dkXXqi0hT0RE+DbMSg7+8IFYXnFwgNHPT0Oi/KwAQsr6udSGg/APUU3xr/RYAxwRc2F4HpyofdwXgSSi0CKp54PAwby4oU8RZsm2CVRiSCw7A2LuzXFOgN+OFmw0ep/CuOb2f/uEZeyvvfSudZVw078UDdrQZ9JltBJPRfMIVyEYFpOnzX3jn/2U0z4B8Fh02ZMycwi3LT5QGYqPJ+c9flLAAJilot6sg+MVD+rvgO/CzihojXInKuh50RKgiIQw3zY9lR82KkJO/Nf/6hu7Nju08Lr6oQ3ew0494OjCG1eVJwcV/8rmZ7x9ToA4BJywXI2Gq2nd/VxkMEmqbVesraew1m2uISWLYqdoftXAKAGG+4J15Lf9SZPmcFJI43RQ5aP2xlEDvmoczRX56C2taxZHx+WMFn77outO4c08+lkSut+k858b8WBSjf3o5Ju4DBxDkMDQLAYADGF4KGn/K5OzFVO6h8d63FDSqznvw/zwCtFtbWF0Ae2wjuJbXEVnsORsn/9UriHpBTszLZR6c3Hx3ybjo8RkrJ1YvkvIM8geyMcjNY8h15r53Kblhej/DZRLsLIRRgz4vk9E0xtHTPjKLMLX/nyPAbzveL3TZi4LaLT85P/daRuxIg+T/mjuoL8HuNakeVY03vAyJHDxl7+0TEdrVk5dUB3bz8PRxZas2zGY3H1V8XOynMtBED0FPvQvcA9F/covAK7n5yjFyIXDlRR5xHNbRa/v/CVI3WF47pPbU1w25WT98k5xxD04txx6Yn1NQwZRT/FEVx8QBhIcsFGTR5TDerHW7bBfD1eIpnfTJ15HWHaSFrPaCZsm0jj+ZEEIx1RQ0uX/3xt6bJlS3/5ddnSurTUJSXpGRnpi0vS01DkrZ07d+6oNd3eQXzEuj1jRo8es8e0c0xhYeEOhuMiPJLiqNWhbIk5TuCkhwdvrPxP7RPK1+Ym7ZO4S8dz11rrPvGP21jw8eXaBfN7TQwJmdhn/jz4zw18qUuGo046/0yvvrgSO178IrMzNj+W+u/NjL54pFDvxL3/o+S7qvI9XLj4kYir0pyg/hDln7/OGnSsrtMzg5ny7zEuNHR890bl3+fJJXcjkJyaRpX/weQkeCch9auXnXsPvUPw9gbdAC82VEWkd42p6g022CjAKkbAKTSA6g71itCIdMpo5y5DO8d3HxFYd8nQdvEAvwiDMEJMSXQYxM67c/J1EoDUThfOkvkjQZnGItW7xm8EFr+pGCpMEIjZPVNYTl6U6qGKF5sdbEbu6ZsFkRf7oGbEWTA1g9NYcIenqJmL9dhCq+1DQ4kTIoQaQ1Fe09EfZ12Ha/SHJYETrYxp0JWRS46euHr4+DUS+hk7dEju4GVnjt069sVtGf0gLsrNHwsjknoEtd1a+syHlevkrJHZjz2WFRi1femGg9+ulvMHPaHICnPDdbRAygRm0E/jU1M6qIUsetcINl/YRG1cN+6BaXWTL5V4PtRMUfjFrLgcVKv5wDePHu3cwTfCJzB4UPvl2154QcrE/1Q4Xs16TCfbfYy7X0aDKqBOwW8ekR8eYmcmy3iGVrU37zloTa6m9Hq4ExGrEzGqaYVQ666xb1bV5uYNmRVa9+WeQXmXfkMrHLPWFqenCM3uHQcQhAAg/EnwcAddeCnGMS/v4iESE0etEalOtqIslINICfNI5IwrKdEZK7zTXDZ+cw8v+gIvvAcnDxmCztw73ijHwwGQqsmFASzmrA
iNNqUXTdsBD5j5Is07sMBWhiedOQvSvINEyw6IL27vRWtW8nRFOsLTQbp2OppBJ7ds0FkqxxAWInU0nW40G61ikvzKNfztiasI/nQCf3vtDfn7cpgEBXjvOPrRw8PRUuzs8IDobwCBBQDhJnkOT1DM8RgnXR8VT3LXeTir9kC1PZy65WPp4EuHAWSgnwjVdCSRpmgZ5h3sIQ+TJ8rMTzdSM0IQ6IjEj6EZvw7z8Y3PPsO/wXzy3hedgE87rjku0speFIbMCu0NuKdQT3A2gWGcVNVUOel5VtNwAhWxRkrug0pIkSz8KEjQdON5kfIBwU7W2GGJNN74i798E3rgjOhdZa26hbTw6qDvkh3QBs+C7tD+FLp9L3TaPr0biTgMSx4lxgBIdBYQqihv8nvkPxKbKiWFSetRqOOa0OPo0b3om6odCn2S8Da0Xk4FrUBbQMtjQCxNiWa70doHMnC1gmadmyKjnVH4eJaHZzLBpInSo4LKF0aMGjXihcoOo/oNGjx4UL9ReFviH6+dHj/dPn3i6ddqEldbXp5/evz+mNj9Y0/Pf9lC8XgT18KBD611htTiG/jSS7hWfl/BuwXBe4YG71axNj+Ctx/FmwxaWW3Xmf0Y3uYEBV+GPlspiq/VFKqg36IgZ2he3tCcgg5HX8wfMyb/xaPfUTwn7GsXvX8SxXN1Ys1rpyeShxh/+rU/EhU8ZsAl4gUhFgSARGAzECSaqly2GfjqJxb7JTdtAXRHKva7oocjFffQaU1csC0bvD4ncUj7lAGvvr5i0Na+CYNikweh37d+mdm9fbtxT/ht+SSra4eooh6Kv1KGV8JSsTPzV6IYFVUxpqc6EFC7nBb1y5oKa01zVSn1UvBKoQrC60puxFNokCJAGJio8cU4ueUaM/GkG5iObmz0uO+xEG2ivTBV0zGQjuUtm4isKF0/LLjCuoL4+MqTQ+deQsIH6z/+6PTpjz7ecVBAlxoDLNLiMy2v/xoMIz8Pq4ZtQq583/KbLVJjoAUS7QjEiSTfEwoKwH0R4JpG0O4m8ih2i8SqZC2x2gwVLZGw0AIbe4CvhX7s62otmglX0S1oJYwXSSgcyRsDZrIvf5FiotBX9REesbHSczvdf608+5OIrhcNHDTKHS5DQ4r7b+t89KhXef7cyt/P3jxnlycULpn5e6Wy3nkNP0vZ4i1WsdoeECXPB1Uj+QLUmAe1Z6QuUik9TYxMdNpbiWa6jZVEoi+xGZvHxxGTF4mpvQ+NKXyn5+I1Kzpak+LXrVnbw1Yw0t5z/dpN1iRr7Kq19bNrXnu1pubV12ompXbJTF267tleB0YVHsreuG59Ykpq0qb1W/v8e0xBec8169G8QxhDdOgdCBqUPRQIgPg+2ft+YKqyJn7kEfy4TGIzrUFJVYm3UYi2Az3d2OQ9DfWSwWZk7Gfk61bkaqYa6VjeTHPfw5k0sJiUf6SlTvkHLegpmAW98dPQF++Go/HuOrwTFpK/YDwNGoQOaJEjofLpyps3yYBOsbV4hsivIqW/ka4F4KuM7FDZezDWLsmAvpNiK7ylYAnRsnCy/ajF+8zPP/+Ma4UW9T8LH6O/AAK5uLW4mvCqldjWs1hni+qb0t80u4c5c5Kp2tywOVWtjHexYe0dwpSuLK5Nyt4ysQO9G0Z788hYHt1kpTJXru5s1yMjTW6KvHkbzgLTyntzAgUXVw/tn9UV1/zyA/6UGLmvzp27evl7tT8P7p/VBRqv/g71JMe5ekHp0rlVt392fBLVJzwxfv7R+MdDElOegSfyVkZ1Wlnw1vFT52U4d/Lo3r2HJWW8++aw1e06rSp45dPLJ+XC5YW9Bw2K63KonUdAM9PAzkOHJxpMnn4DH+tboOyT58WfhDnOtWnFMjCwmppROrVc1VtHDH5E+YHsUon8CXNqa3HQrVviT2fOnKEZi8GkruEHqQq0JPomHsxQ+DSGLEVMI2tayYWV7juLeJ/HYkjht6hR15ZISmox1u4ZaVFaRu0GT5G8KzeKfIWeqFkgkXaTskI9ZvO6+BTO6vtwpV2H9e4ISvKfjeIgJNp27ztyZN/uchFtGjYsv7Awf9hQhzcc/OdtOBi/cvsv/OpcuAe2gZFwDy7A5/G3eBQaIG/d/eVbs974eu9mOX/gymmzn342Z+QyfAdvhROgG9TBcXg7yVknQxvui4/hKtwH2mkfAqoQfFiNWTR4i1Zf30+dUJ4tkWnqhg4hZKCKCFSz9IemXlYvs4phfaz9sp4UZQXrY/WouCJdn61HJJdyRn9Bf0NfrxfzKjz1LfSImI/6gMZ0iforzMmMaFzfDPcPI6ojrkT8EUG+BSIMEWjaQeVamHaQXodECMWEvk1lVCKbzqigkW4egmVKn1mlrzz3bPJjXZ54Acqvrl6+W98Mr7BOav5Mj5zO6KgpNjA2de7EKbOtaZlxsV7yqNK1y/Fx65Co0s5hEzLaR8coteujwAxhlrAJRIDqvy4BHaiGXRsuAQhK4EzhqBAOJNCccm25IPBZQponO/qxY5mQBWdC8TX2W86+NCTTqlwgqnzrCcygE0gGa/jMNl9j4i1y/q5Jw4MB3ibW8BtbUR1wJYDk3FqYvFlzEVmlFiTdZg1oQS+tseX+mm+F+luVNmFbdDWpvKZNSJ1FbVhCw6dGDf8qpR9+TZV+RDZ2JQ12Zdm5WoaGh7fCgK1vpianJeo8drqLWb32lHXN71NQis7xPAtTXHj6DfyW0H9ZSfKw4KCneia1zTQZTP2iErp3XZ6a+ERnpq9WSM2FfCZPDLSLievSpGuS72iLvpGa76Gyp0SwoVXSMUb/ni60d1flz1l3wugfuJ91RySF6U52ByBD08vBtwwrkQRNF1HJzqJJ27dPKtq56sk4a/fu1rgnxXcm7907efKOHZPjuz+ekNCjB5OJIxquCXWSB8HLG3SluoWL4hHF0WQXpV3ycle0l82LU6Z8eyUkI9pFl+IbvAOO/QaG1x8RsoSVJ/AMuOoEXHT3chWl41NoJ/pKOgECwRjXrgKVMm8B2ssAYLGS1Z1C34XQevFAzV5H1do2A/SQTj6CFWyqy4CkjtBXjv2wY0Yba0JqxttIfn39qp0FsxcjmI92rocg4fG27ZJSOsjj1pfO6DdzwmQZQDAKlaHrJCcdBT7URBoJ7uUy0liItFCCjoHqA10OJE/wViD1UwLJAwXTyyl0KKNDOh1q6AfZdGhQgOkzk2+Uh2qkZFQosyiiyP6LgsUHY6PSo7KjBPKVKMJK3lHBUURmXo6qiSIC8gNyq7ytZlv6to2i3w00KAHtTk0QRY1SaRsB4+H+zNTMtPh0SqPSza93T328Z8XmFYdk9Ha31Ixe3bvNE5+O7xAZ3y5UHjV71uTE4QH+I7pOnT9nqhxtjYtJSlyi2HuzST7/cWc+n+rCdJHab3RooEO2SLP5IqULeVdBE/VE3rxFPxpBB286XCYf2cD9fD6gpQACaxQw05Q+9EK45oh0XMb1bM4NJDYczOIAOeAh4XMuDuDhEizjC328XZtzNEEopkJYjBguHVMweErLusu6mFk9U0dH1JJQyqaXZ
qemCM3vHR8Un9AiCKdJ5xWapAEgTGU1ia01cdQHGhUQUFxwstVCAW2vsvigBTnXsAMK1+DjyA0Kn52F0t2+7Df3of5wg9BFkVNC7H1yKXYO3FBbi/r/ocxfhDPhSQLpDTowf9pNZdipLAwgcnHCZqLWl3AyS6RiGibCNM+MQa/u1qX17NY/REjw7N937Jxn28W0ay2tUuYajLbDLUQmSqAH3wf8P9j3XHewTeC82LD4cLjlwxKYjrajki1mJudmEXuknbMeNQOQFeREsL3Eg9ojdAghA033uB7p8D89p2HW4T17jhzevffIW0MG9h8yNGfAYHHmpvfe2zR986FDmweOGzdwes748TlMR08EW4VVAjE8wGd+AOjAZ3Aqu28DQLpMdHUkOA+Gom3k9XPoD4heAt+gdwEABo5aBB/lOzKQqhhsOHBr/C75zjkhmn6Hr2pk3ykm39klnWDfOcu+840wi3XNfQsMaCf9juposO8ABEbimcIXYmfWA9YDEEl9v/NL///p/JJZl5eye6xO+zaOdYPRQ03Q6yh9ct9h40f3m45+E+CfH35xfcO0pGDS+oV2r5ubm/1sTsGkXNb6dZi0fnUcPhjuvsZsKqUnSReKIkBr9mRZ0APmAndwwEsSxWjySCqMRYWZCT+CwymMwRWmuwpTBV6BQylMM1niYUarMMfB6/ApCuMtu/yOlwozESyHecCbzEVhaCzIi4hiLe5lKuwxmAEPUFiTRGFNylEwzLdp+AsA3WDJxnLJW7iqz0c1PwiiMxRkHyHAPJdOFrsnkJ2+CSCtMNpQpw3wLrTAl2vINGVgL6LueAodcslAO+gF8o/aB0b2By0k/Dy4fqE39ngHXyJ2wRXHXB/U2vGTL9p69yac00JS2rmO4fHHcAIchxZAoOwbnEr7nghdIgDdN3PhkYZ6cp/197C1bqOsNahqXGuZ0V+F6a7CVIESZR0NsguMlwozEQxvXCPZZY0avqC9HGzOdsqcDUuUOSUJNf7eGwCghTqLCjMTJCn85abCNJwjMHMZXgpMVUOagpebrMK8T2A2MrwUmIkNgQpeDIbWKUmN/ABaKzWzTN7Nf8QpC3ZBAk4WuExYoOKscFkgWjZdoL1PAlXFArUjhGABFZcjQSP9q12LdCSuL4haW4GN1S5q05bRonZtERvxyPbt91u3WmEHa966BAW0/lU0Q23hQutxR9bChfswmit9D2yfdXTus98b95nOSSul/0CXSGA6Ofe9H5xGYYIkDx4mQYWZCT+BUylMsCtMrgpTRaT0ZArTSnaBma3CHAdfwMXsd1xhQlWYieANWEzXLoTC2EIMtpbOtYOgN/hauCEuB55ExgYQx8K/QoBG2lEismMPdGykUSsjhIkQmiHUQdgbpuCqTTAZpmzCVWzAx+BTsAvssgW/zwb8/haYiT+gcwgEn/2kP+N3EADCCRUH8B0HfPywPR/ADtWGjNqH0sBbcGh7+tJWeYlmN5XWDVbER+ND1LdjiWdqJEDiyJmhEum2EFMhEvppGjr6b0wftKk0bwztSih47cn+m5b0GVjfM8wiwzux07vtexdV+ptk7BOZH9/Y59G69YaLA26XKW0KJAp5acD3i/Dd7BWxUBjWpt1vB1OLomD9wRYtfjvE+IfVsbO1SHLyhlnZs0bJna2XCmNRYWbCT5U96+cK012FqSJ6dCiDkV1gvFSYieBNZc8yGJsfkZSqvGf10GzOFOec65Q5vSSFrwECmwjMQtaXZQLZfBU+Z5raIfBwRhrdPegOp64d5OpAbO6urpuPVWlfoQU7Rh+ntQ9X/FULvfGt2r/q6v5aQf6TbPjXusqqWvwleReOA1eNHb+G8e0z5Fl3ysEgEgzSSBxfrhrFtbVGLzUaB/4avgrxkZh7SZqqXZrrGt1dky8wcQVPccQMbvRf4Nzav069+t1M2PX8sf6vRHRsOy8tLx+/t3BE+vApYrcrd//9xrSzaV3xTysrKkKDjgW0yeneC5rWD/y8Z9+CTcuUtWB1v9IVshZdnbpkMQika9FODmBrocJcVmFmwiQQQGFiXWBkyQkjg6oUM4Vor1MgwH0YiwpzPC2K/coDMNJpFWaifwvKRR0oDD1eK6ZaO19vFadj4DMwjULGyxQy3mBLdsoZAcQ1XJeXin1Ae/AY6AJOc9XNmkO9Hl3qLLBSZ3s6CKYrlh5bUZJelk4rntOJ3shOH5GOpim3iitq0hvIC1GeTRc624PYiy2dO6GGapk2fLdtrOaSRKut1bTztDNfH/rwCB5LcPB1o5p4HmwsIRWvLj2Tlfz15opjt375NG9Q3qRrSK49Oem1pPSXx3x9wzFEEFevGrWw35OPnaqflrWh7ZmiucOFjPHTPRA8OM40NKfHqAM79rzeffi4YZnN5TWHumSkZ+G7P62Rl+xv3/6FmF6Hnux4ZFS3zGz0S9kMqdWEUrbG/XAqrU0ma/e4065JY3YNq6uVvif3n3Dy4hLQgnJIiFPfqTBXVJiZsLPCr2EuMLLMYBgvpvlTiFCdAgFUGOmMCjMxMIhyT2sKY2ttsFkUPmugzbeljB8/cto9Y4HE7B7VXgFlAKAC6ZQTRgYzW4hai4bZT4cJTJ70B4NR7B4LQAxKp9o9+wnMTOmgCjMRO4AMvBmMq92TQvi/j3QTWAhX7wSkxJivPAgOIiaNV5BOqc637/Uil4AOJq8ges8Um2EONsWa0k3ZphGmKaYSU5lpr+kt0wcmT+IaBpkoTEis3dcUwvReiIm+AF/K+zQS1lbD1AavtvRDczBLGepcm9r8CAv6Aqf3TjUjCTpLkYnxEVSi0fwbDceQK2fh/uJRk/CX3/+IL0GfSwO3xon6/hn4dp/vLL0jew7Y1uVsH9x8wfaw9eMWbtwq6SfgG/86ewcfhwHVP0BzepyUvztlS9E82aeVvsqY1X560b3U6n1LO2RUPDvnTbpOrL6QyZ9+ivwZyuSPWSeq66TU/TH+6u/kwT0Kf7WWFSgV5rIKMxMOVORhpAuMLDEYxoNDmTyMeGAu2aLCHB/O8Il8EJ/TKszEeCYP21AYWxuDLZxxhEDwfFVMFA+ynI8nSOXPaFOsVLGaNeOowQRAT5aiXs9U2vvvxgd1w6k1S/7ExHq9cBsvpqly9PiXH1y8d/simY/gNZPUHh7m7Cq+1oQZWa52lcDbVa14u4pdqXaVkTCMakpRHlKNLOtD7Koc6H41fnTME+vGDx+F//6lw7CoJ9aNHT2+rmUrGUb4x7cqWQDrA/1lfNm3fUBJCYqshfFGnw1f9LhWZrqNP/FutuFs9z+29FnUBqIhnl4nd3ad2RY67G5uJ/Yoa8FquthaDHHyxm5FFphkN7ZiKswpFWYmHACYNPB3hfmDwTDeGIIYhI5BaOc6qMJMjGOSgMHY/Gk9gfJbrN6HzZfrnM9fmS9QNjXaUitJLDDtv+tj+U/ViTbdx5Km1InWdVozvOkyUd07jje6dOfrRNXnY3TIVehwl9EhUEeejgZ0zYz/IZXBrBaEr6XWN11LXUpLxBU5WthwXdeDnYMVTmxOEgvlDxhRQ6KPbjD35jxE
+wgj9SppROAseUfz8768ojfzRcP+XEUJX0Nssaj9zdSxUE/ckNRiVpqq0/WoX5y7OAvXEx8oEwrd1mYLs+lJHPRUjnsF1sKO8YUd9x6o8PCEPaEH7ADdYS+9eyUurMRWX6LykmS3Tyrxp1WfAra3CU0QsZdCQQdiMc3WnJb1yMYQ/ribBGCk+iCBGEoJZQkoj3tmwB8aF1FNlUqM5k7HatW4UVpgmjZoIBeSVG0aadjiM5mZJxb9iv8mEmHxycyMD6fxLTL3xs0vLSkpWVyyQLjT2C0zetjwUTCuzkSkQuHw4YXaphkUuff4CVJ7ffLkTjhG7Z/ZSfLsKcS3dAOhLMuO+Cz7QW9dsC5WJ+Qpx3GSbIOORGytQkpl2dqPoFuZWO+/alXgHwoflooDUIR0geXNOrL8lKCWDKcL2c7yXe/7kWAiAhovms6OUeKVzhs6eM6cwUPnTU6OjkpKiopOlvwGFBcPGFhUNDC6c1JMTDKEyUpPgfi10E/6GxhBAmAlU9qZ3KtpqMtLe8ugXngprh1kk6s1XQwHod/sYd1fsEYmLJk1LOlAXESSVD1i+dDMmLD8VUMz2jM59xIqEn8WOhJL8KvzIMeaweJIqEhy3rOBsWMzKH5dhL/hcCLDJGDQ1GL6siZQo1UwhXV5blbKRfEALMQ73iPw3YQ7MF8Lz/Yqg4fKCaf59AvSIPwczK0CgM2B78Lh0Is/C5WIi+E7F6Zc9MVXoTv0IPhRXNDz5LcjwEkmc0/CJwEARpceDp3q7xJc0FsM/hSDPwX7MXjed/RQbbsuDWa0HYYCiXCDO8WEfRbO0JbYCAc8NzXla9iNjk/iT2HkT+fIGHsBKP4pbEBdhTvAi3CmXfAQol0j+c/MLhw7Z/bYwjmCJX/O7BG9R86YOYLmJ8FWZBUOApl8L4Bsa39ahRoG46EVpvz9Er4CQ15CEXgaXG6Ey+k8Awh8CxVeovBGaIJhRuEeDMFXXvr7b+EgnmvEc2EZXEfgY0CRME2KBAJ9KhDLjqJLjITmV+lhzUXsEGb2/OmogzCIyGQP0Ayk8/H8+31HdllydzbjeAoaycJYVSmq9XIelUkrnSKhVfCJFNCXpaVV2CrCMyer5NvC7G0221Q0w3EAPonw2/SZehK/4AqZOxqUgvsh/wfKsaIjSTlWbDQ7EI2zs/T8YQOAnupMYMhR53bvSHqcDhlskbyrZ6omd+jR5y1cjWeLSa1CZ3KQGGTsLw5om+os9J+wC8ftWPbY1DjfpHlpN/F3G8h/MOxmyvQs34RpSUu3wzM4Dp6BJ9HUV318jnkbYIuPUOWiSv1x2NrgfcJgPFDcrHKRwj97UJHwvdDx4Wf9Ct/T/DYqqlLWyx8A0cz6CFuAyY/qJNS2HjWpPfzJhf9/oseQqvkjL7xw9ewTa3PD02Y/XjT2q6/QuLo60muYW/llcMuTphYFBbmk17DRDugNgBAuWAjPGUA3Dc81d00lIHeRsh2KLYfajLzBeVarnnGeN8950Gz1idShA8XFH+DRHvDFD/EY4bysh6Hr16+fjoKwLEET8mW0H9XwJ7outANRYIsmz95cSznFHnsw726PCmymSZE7s+FqplxJkudpE+aPzpTbHw+GeeStNg3/n82ew3OPzp4zmQTQV4QegaCPpmai+QNnHf+vqyMs/4fqiIfURgwGAG4hOEogRiPTmzd1zjOZnmuXVFO4LIGr5mQsak5mJpzXmKNT8jb/Bbts07oAAAB4AWNgZGAAYen931bF89t8ZZDkYACBIx8E9UD0OZEzun+E/l7lLOKoBHI5GZhAogBOMQvyeAFjYGRg4Ej6e5WBgdPoj9B/I44FQBFUcAcAiWcGPQB4AW2RUxidTQwG52Szv22ztm3btm3btm3btm3bvqvd03y1LuaZrPGGngCA+RkSkWEyhHR6jhTag4r+DBX8n6QKFSOdLKaNrOBb15rftSEZQrtIJGPILCkY6jIjNr+KMd/IZ+QxkhjtjAZGRqNsMCYRGSr/UFW/JbX2oq9Go427QIyP/yWbj8I3/h9G+5+o5tMxWscbE6xdmVp+DqMlJzO1Bclt3mgtwOiPxcbmGI2o7KObO5lzmD+huI7lb9+ATv4Hvv74B6KY4+kdvtQ1FJG4dHCF+dH8hatOQjcCJwPszsXs7l1oo/HJa86vKSgqu4lmdQGjpXxPH/k1PEfj0DaoP7ptc7vQKphrtAksG81RySdb+NnazfUr/vEPiGj+1/jGKCizSSLCLPPvPi8Nn/39X/TWlnbvheT1IympZ/gt9Igueo8S+hcTPspAYdeXBu4c5bQmrYO/f9Z3nM7uM1prdkq7stRw5Sknc2miy+mn35BK0jFGvqGmJLS5k2ls66t99AVzPqpkHKWehigT/PuH+Lhj+E6QRZDDSyRneH+Qg/moscqXIcLLDN5FM5DTN7facniTZzlsY4Bepkvw5x/io7UkeJaDZfAm8lt4kfxGb/MKY6wuI8UbGbxNX9JrV7Pl8BZBDoPpFjjY6+MFVPw4OfndJYbLPNq5I7TxnZn8UVtmhEaSzsgYWK4ZN8gox83b6SL1qCFVKeBGENNNJbXmJLu2Z5RO4RfXnZyuEuVcQZsTn8LB3z0FW2/CPAAAAAAAAAAAAAAALABaANQBSgHaAo4CqgLUAv4DLgNUA2gDgAOaA7IEAgQuBIQFAgVKBbAGGgZQBsgHMAdAB1AHgAeuB94IOgjuCTgJpgn8Cj4KhgrCCygLggueC9QMHgxCDKYM9A1GDYwN6A5MDrIO3g8aD1IPuhAGEEQQfhCkELwQ4BECER4RWBHiEkASkBLuE1IToBQUFFoUhhTKFRIVLhWaFeAWMhaQFuwXLBewGAAYRBh+GOIZPBmSGcwaEBooGmwashqyGtobRBuqHA4ccByaHT4dYB30Ho4emh60HrwfZh98H8ggCiBoIQYhQCGQIboh0CIGIjwihiKSIqwixiLgIzgjSiNcI24jgCOWI6wkIiQuJEAkUiRoJHokjCSeJLQlIiU0JUYlWCVqJXwlkiXEJkImVCZmJngmjiagJu4nVCdmJ3gniiecJ7AnxiiOKJoorCi+KNAo5Cj2KQgpGikwKcop3CnuKgAqEiokKjgqcCrqKvwrDisgKzQrRiukK7gr1CxeLPItGC1YLZQtni2oLcAt2i3uLgYuHi4+Llouci6KLp4u3C9eL3Yv2DAcMKQw9jEcMS4AAAABAAAA3ACXABYAXwAFAAEAAAAAAA4AAAIAAeYAAwABeAF9zANyI2AYBuBnt+YBMsqwjkfpsLY9qmL7Bj1Hb1pbP7+X6HOmy7/uAf8EeJn/GxV4mbvEjL/M3R88Pabfsr0Cbl7mUQdu7am4VNFUEbQp5VpOS8melIyWogt1yyoqMopSkn+kkmIiouKOpNQ15FSUBUWFREWe1ISoWcE378e+mU99WU1NVUlhYZ2nHXKh6sKVrJSQirqMsKKcKyllDSkNYRtWzVu0Zd+iGTEhkXtU0y0IeAFswQOWQgEAAMDZv7Zt27ZtZddTZ+4udYFmBEC5qKCaEjWBQK069Ro0atKsRas27Tp
06tKtR68+/QYMGjJsxKgx4yZMmjJtxqw58xYsWrJsxao16zZs2rJtx649+w4cOnLsxKkz5y5cunLtxq079x48evLsxas37z58+vLtx68//0LCIqJi4hKSUtIyshWC4GErEAAAAOAs/3NtI+tluy7Ztm3zZZ6z69yMBuVixBqU50icNMkK1ap48kySXdGy3biVKl+CcYeuFalz786DMo1mTWvy2hsZ3po3Y86yBYuWHHtvzYpVzT64kmnTug0fnTqX6LNPvvjmq+9K/PDLT7/98c9f/wU4EShYkBBhQvUoFSFcpChnLvTZ0qLVtgM72rTr0m1Ch06T4g0ZNvDk+ZMXLo08efk4RnZGDkZOhlQWv1AfH/bSvEwDA0cXEG1kYG7C4lpalM+Rll9apFdcWsBZklGUmgpisZeU54Pp/DwwHwBPQXTqAHgBLc4lXMVQFIDxe5+/Ke4uCXd3KLhLWsWdhvWynugFl7ieRu+dnsb5flD+V44+W03Pqkm96nSsSX3pwfbG8hyVafqKLY53NhRyi8/1/P8l1md6//6SRzsznWXcUiuTXQ3F3NJTfU3V3NRrJp2WrjUzN3sl06/thr54PYV7+IYaQ1++jlly8+AO2iz5W4IT8OEJIqi29NXrGHhwB65DLfxAtSN5HvgQQgRjjiSfQJDDoBz5e4AA3BwJtOVAHgtBBGGeRNsK5DYGd8IvM61XFAA=) format('woff'), -} - -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 200; - src: - local('Roboto Light'), - url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEScABMAAAAAdFQAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABGRlRNAAABqAAAABwAAAAcXzC5yUdERUYAAAHEAAAAHgAAACAAzgAER1BPUwAAAeQAAAVxAAANIkezYOlHU1VCAAAHWAAAACwAAAAwuP+4/k9TLzIAAAeEAAAAVgAAAGC3ouDrY21hcAAAB9wAAAG+AAACioYHy/VjdnQgAAAJnAAAADQAAAA0CnAOGGZwZ20AAAnQAAABsQAAAmVTtC+nZ2FzcAAAC4QAAAAIAAAACAAAABBnbHlmAAALjAAAMaIAAFTUMXgLR2hlYWQAAD0wAAAAMQAAADYBsFYkaGhlYQAAPWQAAAAfAAAAJA7cBhlobXR4AAA9hAAAAeEAAAKEbjk+b2xvY2EAAD9oAAABNgAAAUQwY0cibWF4cAAAQKAAAAAgAAAAIAG+AZluYW1lAABAwAAAAZAAAANoT6qDDHBvc3QAAEJQAAABjAAAAktoPRGfcHJlcAAAQ9wAAAC2AAABI0qzIoZ3ZWJmAABElAAAAAYAAAAGVU1R3QAAAAEAAAAAzD2izwAAAADE8BEuAAAAAM4DBct42mNgZGBg4ANiCQYQYGJgBMIFQMwC5jEAAAsqANMAAHjapZZ5bNRFFMff79dtd7u03UNsORWwKYhWGwFLsRBiGuSKkdIDsBg0kRCVGq6GcpSEFINKghzlMDFBVBITNRpDJEGCBlBBRSEQIQYJyLHd/pA78a99fn6zy3ZbykJxXr7zm3nz5s2b7xy/EUtE/FIiY8SuGDe5SvLeeHlhvfQRD3pRFbc9tWy9/ur8evG5JQOP2Hxt8ds7xLJrjO1AmYxUyiyZLQtlpayRmOWx/FbQGmSVWM9aVdZs6z1rk/WZFbU9dtgutIeCsVivND1dsWSG9JAMKZOeMkrCUi756MI6AN0g3Se1ellm6GlqOXpBxuoNmYXGlgn6D/qo9JOA5ksIFOoBKY79K6V4qtC/ZJy2yXNgPJgIKkEVqMbPNHpO14jUgXr6LcK+gbbFoBEsoX0pWE55Bd8W/G8BW9WNboZ+b/KPyWslDy5K9biU6TkZpY6U6ymiLdUv0Vyi9jvt1boT+x9lTmyXzNUhaHKIcqyEaDkLfw8YTQBNDpo2NHmsVjZtrl2u/kZLmDlHaT0BJ1HTZ45+gbdfTSznJVOK4WQkWAAWgiYQQB/EVzAxYhheIvASgZcIvETgJGK8NfDdgN1GsAlsBllYO1g7WDtYO1g7WDrMcAK+a2UA6xci+kp0i0EjWA4s2nMZO6DNrE4zDDbDYDMMNptIHSJ1iNQhUodI3R4DafGzG8JSKEUyRB6VJ+RJGSbDZQSrWsb+KJfR7OAJ8rxUM/Z0xq6Tl6Re3iTyjUS9WezsQ+7e9L7j24G//uznFl2th/WAOrqPNelG0hq5z6Srk6Ub4Kau0Mv6qe7W7ZQPsxIhPcgeX3sPns6DCDjYSX/9rj3/7ka8bbeNGQXHE/UzyZb3Naqtt/W+FAepZ1J3mVOWPoW7ipYzFE8hSiE3Erfcabyo/I+kF7TVzPBMiq6VU3Wr/FGy9F2y1MD5aLfeG7ukh3SKztOQHtOldxmvgTW/3uWKBeLrqifdSuxbPeNypiOTPb/StfqBbgBrYCOIKkifoH6ou3S//oxFky4jLzLWvTSoV/RrU96pR/UY36Mdx9VzerNDbA+b/M8UzXE97TKTYCcvdY079Fxl8v2duY3vJb3Y3lvbjK+QWdMjScujKb226ze6V0+AH9gHId3G3ghxPk5yZs+m2BVzo4j+otuYZ3wX5ibGa4uP3R5tYufcaU32pGm7er+ninU2ffVaVz47Mt+tHXstTVvae0Cv3PeYTjqG4n5v927ukWDyTnDucuZXdXEerpqzcsc10D9M3nKnmNPFnZ6n7nOlY/RxrdBhYDA7yovKyx/Mq5N0vr6l67EIaA4ne4k5369QP6Kvpd4r8RRjZ+hP4PPkPrp4i832qOJ/AP1E1+ke7uE9nPDWJJ+Jrx4Cu92zEZtr6m93h6H2O7CDtjENA6eSpZOdzwL/84C8m3g93kuyeVN44C/L1LyIT7J5D3gNqz0SVjloc7lZuAc7/RfC3NHu/+dBU8tP6vORAnN/90poeoM+5H3vIaYsM3omo/oYwfVdgLgpk6+vWxvGSuQWfkuMV4v5+Q1TAaIMIr2ZVYhyIWLzCipijKGIT4qRPvIU4uNFNJz8aaQvL6NSeBqJ+HkjlcHUKCRHnkEKeDGVw9dopJdUIBkyTsbD80TEIy/IFKKoRLJkKpIpVYhHahCvTEPyeGVNJ7oXkX68tuooz0SCvLrqiXCezCeSBbz//bIIyZAGxCOLpRGfS2QpHpYhPlmOZEkT4pcVSJ6sk/XM1325WdKC5JsXnCVbZCtlG75djiSFI9uwkwE37hv6Md6G2cx+NJYVzKs3MxtPlJOQ/sxtqjzEO7FaBpk5PMIMZtKznvgGm/hKiKsJPjcw3oj/AIgWgIQAAAB42mNgZGBg4GLQYdBjYHJx8wlh4MtJLMljkGBgAYoz/P8PJBAsIAAAnsoHa3jaY2BmvsGow8DKwMI6i9WYgYFRHkIzX2RIY2JgYABhCHjAwPQ/gEEhGshUAPHd8/PTgRTvAwa2tH9pDAwcSU
zBCgyM8/0ZGRhYrFg3gNUxAQCExA4aAAB42mNgYGBmgGAZBkYgycDYAuQxgvksjBlAOozBgYGVQQzI4mWoY1jAsJhhKcNKhtUM6xi2MOxg2M1wkOEkw1mGywzXGG4x3GF4yPCS4S3DZ4ZvDL8Y/jAGMhYyHWO6xXRHgUtBREFKQU5BTUFfwUohXmGNotIDhv//QTYCzVUAmrsIaO4KoLlriTA3gLEAai6DgoCChIIM2FxLJHMZ/3/9//j/of8H/x/4v+//3v97/m//v+X/pv9r/y/7v/j/vP9z/s/8P+P/lP+9/7v+t/5v/t/wv/6/zn++v7v+Lv+77EHzg7oH1Q+qHhQ/yH6Q9MDu/qf7tQoLIOFDC8DIxgA3nJEJSDChKwBGEQsrGzsHJxc3Dy8fv4CgkLCIqJi4hKSUtIysnLyCopKyiqqauoamlraOrp6+gaGRsYmpmbmFpZW1ja2dvYOjk7OLq5u7h6eXt4+vn39AYFBwSGhYeERkVHRMbFx8QiLIlnyGopJSiIVlQFwOYlQwMFQyVDEwVDMwJKeABLLS52enQZ2ViumVjNyZSWDGxEnTpk+eAmbOmz0HRE2dASTyGBgKgFQhEBcDcUMTkGjMARIAqVuf0QAAAAAEOgWvAGYAqABiAGUAZwBoAGkAagBrAHUApABcAHgAZQBsAHIAeAB8AHAAegBaAEQFEXjaXVG7TltBEN0NDwOBxNggOdoUs5mQxnuhBQnE1Y1iZDuF5QhpN3KRi3EBH0CBRA3arxmgoaRImwYhF0h8Qj4hEjNriKI0Ozuzc86ZM0vKkap36WvPU+ckkMLdBs02/U5ItbMA96Tr642MtIMHWmxm9Mp1+/4LBpvRlDtqAOU9bykPGU07gVq0p/7R/AqG+/wf8zsYtDTT9NQ6CekhBOabcUuD7xnNussP+oLV4WIwMKSYpuIuP6ZS/rc052rLsLWR0byDMxH5yTRAU2ttBJr+1CHV83EUS5DLprE2mJiy/iQTwYXJdFVTtcz42sFdsrPoYIMqzYEH2MNWeQweDg8mFNK3JMosDRH2YqvECBGTHAo55dzJ/qRA+UgSxrxJSjvjhrUGxpHXwKA2T7P/PJtNbW8dwvhZHMF3vxlLOvjIhtoYEWI7YimACURCRlX5hhrPvSwG5FL7z0CUgOXxj3+dCLTu2EQ8l7V1DjFWCHp+29zyy4q7VrnOi0J3b6pqqNIpzftezr7HA54eC8NBY8Gbz/v+SoH6PCyuNGgOBEN6N3r/orXqiKu8Fz6yJ9O/sVoAAAAAAQAB//8AD3jarXwHfBRl+v/7TtuWLbMlm54smwIJJLBLCKGJCOqJgIp6NBEiiUgNiCb0IgiIFU9FkKCABKXNbAIqcoAUC3Y9I6ioh5yaE8RT9CeQHf7P885sCgS4/+/zE7OZzO7O+z79+5QZwpG+hHBjxNsIT0wkX6WkoEfEJCScDKmS+FWPCM/BIVF5PC3i6YhJSmzoEaF4PiwH5KyAHOjLZWiZdIU2Vrzt7Ka+wvsELkmqCKHtRYVdt4BE4FyeSoX6iMiRPKqYCxShTiEh1eSsV7iQaqF5RBWp7FaE4o6dwoVhHy+H5apHH6iorqZf85805OM15wrd6edSAhGJjfSCa1KSp0jhWk4gFiFPMYeoEleg0DpVcNXXii6SBCcFl2qieaoVztjYGdUOS3XslExxjbAHX+fyZYFqoTQgdCfnvz6snaPcl/AK611DiLAGaEgm6fRmEkkCGiK++MRwOBwxARkRsy0OjmsJTTLZ82o4OSU10x9WiaO+xutPSM70h2pFgb3Fu9LS8S1RrK+RLFY7vEWVjAIlqU5NdNUrifomza76iMlszavpbRIsQI9LjYezPjjri8ezPg+c9blUG5yNc9WrAZqndEna2etfp3OJL8+6s9e3p514oCS5argkkwfWZa8SvsIiNZZEMxzEu2qs8TYPXqrG7ouDD7jYq8xevfiKn/Gzz8C3Eti34JrJseukxK6Tip+pSYt9Mh3P871dHI9EumTkQkpqWnr+Bf8pvZNABJ7CgCcAP2Eef8K+IB/wBfigB3+K4K1rqGuwVk/bDRoziHaDl3/9z2ByXjs1YMwA7S14uY92G6y9SVfeQV8bRZ/X2M8o7bo7tDK6En/gPKggqTzfkY9Kj5AO5CkSyQMJKm1BDub6SJ6IPM3LteRFZBCm4g2rKZb6iJyCp2W3BbQ0v0Bx1KnpoKIko05WOXe9ku5SZWB7bkj1guDahhSvSzXDicSQmuWsV/3uerUAxCOngyrHFSteucYmprTJ9BcrZrcSLCZqiii7txPq8CdkwVngQlHYGx8OdSnsnJ2TTws7dykClUyjThrsnB1sI/m88f406vNKJl+wMJ9W8uWHHvvblsd3fPT225vLtu3l+PLnH//bs0ve+PCtj5TS7afoc5L63KqKSQ9f3WfnS2vfcxw65Pr+gLhi96r7py7r3e+V6g1vOXb/3fYxWNCk8z+JC8WDxI7aDdzpTh7S+aN2ctRHBOCImuCor+2amSfY89SucCjb2KHsqKdKjwKF1KkOYIHDpXp13UWFzYDDfDjMd6md4bAtaGlP+O11yO4am5ACRlCsds6HP1Iz89LgD6J27SS71ZT04mI1QYaj1LRiZArwIRyKT6VeKdgmu4gxqCfVGeKhfpp1mfcnrZ43d/Vzc+ZXjbprxNDRJcOG3VXLvXVDtJjOgTeqVsMbo0v0N0qE/gPmbt06d8CcLVvmDJk1a8iAIXPmDGmQhakdzz26euCcrVvnDIy9NXD4jJnDCHiz4ed/El4DvrUhHUlPUkEiKegVMpBx2VJ9xIqM684Di3oxFgVBeYK6eXeCw04utSsc2kGT7C7VB4fxcr16FfxGPmy3ChnZHWRkks8OTHInprZjTOqeLbt3EJM9MbVDZ11rOne5ijJ1ATaAdjgp7QUeDdTEbwrmOGgjV4rgUzkmB/WAHhXBRxiPhj+x1HnzwMiqx18adtsa+lynLpP+0u81bumM2w7d9/Hpyk1rR2y7VisRTVzBtEEPXXW12q3TPSPLJtN7K98YYxvz4l+rNq+dOWzB1TO09OuUMfM+/+th8ZGBt9ZFZlVffw09JpqEzJEruEN9Hr1pYYeSroPGLgAbnCb0IceY387WvbbhsqkiXeCvkVGN3nmauSxb6EOt7+3XThK05Ye1TtxEaSiRiYdQxc0YbAWr87AveQpdpCidSpzsc7mBDdnkYRq/SUp64vDhJ5KkLdoJrqeTjud6l9C/3B39Vdvu1bZHfx1/7RiuM17brXWivza/Nl+n2puu3cUtF7q4nKJwPIHLE1PQ/fiRow8nSS/TeO3EZkmrKOPc9EYv/QvnK7u2JLpXe8qpPRx9bwzbdyo3m78B4oiD3EMgpIKzoQVUcbL9cyB7EczExZy5kp1EIQjnv0NUQvPfQfd+ovP+TPTqDoW4FMdeQaEuhdvLqZwjP58qDnSmVBU58Dc20BQeY6jE/IrIh/ksv+gx2WiOJzWD3iiMNdO+Aa3mm9vq3rvtiHBr6Uw6VVs2t/Re7YuraCft4560PWH77U+WC52EH
RBlbyEKKVBMYZXa6hUxBMJD70is4DQpwUPKo6OEsGutY3EcdFwIRSxWfM9igo9ZLXhoJZZY5AW3D6EdXL0clPvTyHT6utZvOjetnH6i5ZdrafSYvofBmkadZBfoTBbuATXG2kxjQDJoUwKSKxY3qszgfhXj4Iv+6pe1E/p1OnHdOBe3Biy3DV5HpVI9/lBFKAAW59XyXtREwB7G3nyd6Ddct9JS/G41vHQk6+G77WIIxl7feICXQAny3nr2o18CsUv10vXr8ftp5x/g/s0wkEwAMiHwgVX1z/lpmKZxoyZEX5gtdTjzKcNMi8G3BA2f3I1EbLiQLMW8MTqVFN3vOpv8LjAi1fCwqk0oRlZ4ZJc7HHInUhcXbMN59PAi695x8ekjR/44feTw/1SqGzZsU6qrt3KFtB9NpCHtA+0H7XXte+0j2omavv799Dd0/Lf/+c+3QMeu82e4DWItyKI7iQjo7zjcEeVcGXsLEO8wsQjACidslkeBC9SiGzNoMxMRMjcLRL6L/rtSNN865Gw/sRvyaDJgLBloToKjiAMptgHFaCRqPF8fiWdXi09CLUvWAZPMABPYpSrBcpIHPyDZQdU8Eh56HLByCrzrSZTdEd5mLQamqDbgj+IsVuLliEQ8xSzIZBvO00T9oI6FNOYefcHJ4h+f7Dr2zGJtMsf93FBJjy6c+OzDGzZPFjw7Gg7vqPyfFVo3sXQEl/rUOyOWrH91JdIx9vxP/GmgIxe0JtIW6RCBDrEtbkkEZkRSkCQvkORlCMObYMmrtce1TYGQakfR5unuACID51L8iDcS4DihADEFnEKUgRBDyXIp6fiuDMdyAaKTiJzOMEscEN4ewYcfYgegjrYsdsQB4FBJVnGxYpeVNgBJ3GpienFL5JEHxsMOGPU5jYxhyCPYJnMsV/7Gs6u27nhp2bI161eueLimnBP/3L3/h3nTliw+d3CP9jNdJC1TXnj62SfL1sxesvbFxdLLx+p23729fc5rc/Z9fQR1ux/IuT/YgpU4yRASscS0qJbYLJwdgDoAZ6lekQAYuwoUS50SF0LlVvhQxMxciFkCJloYPLagN5FRuWyoXLRY4WTFwVSMhmVAkqBnkJjkmPpxax44frwi+h2XKoVpeV++oSGrVHuclpfyvbiJzD9sBZszw77SyX4SSW2UW2qj3FwoN4+tvsaR6jLn1fptqS4Qmd9WzxC8s64myUkceSoHcRxFlOSMAXPmyx1O9OVOh+7Lr9p8ZjH6clFxuhTXXjBixbN351UP/tkVztpqvA6PJy8CrxkPZTwUlEBli4nizacRl8erw2aqmtHTpxYrSaABbtRsB8g3QsxJxRfIFERpyvEgpO5Fi7q4fV5wBtlbufHVy9a+8MITDz8ZGH0ztz+6rkvRwik7jx/9uvYXOl168rkDO9cdHDrMxadOjp4JdeH58+TwUe3PdwjzTyuAV+nMVnPIXSSSgNxKi/knG19f685MQIjoFoE5bZk+J6OrCinJLmSK6gPmtIPfgWTQUMHkTmAampkGGupzAgS0uYE4c7EiyIoJqZE7E9BEvykfAI2UCgYKbo0RQoqak7mCpn3cf3lxenH5wLWf9dg55cDx3w+8o52r3Pv08m0vV03fHuBS6OQG2qtNRklGWsP78weO1H498rn2I23f8PGv/3pxW92cu5guDAAdRV2II51JxIwaik5bJWie9gLFXIfpaixFg8CnOlAHiRk2zRfr0cNKeVOwyE08A/jXT5zNtVXacqn5C/GGsjLtx+gebemMGXQq91dqIoglxwA/7cBPPwlCjnw/ifiQo8nAUQuu2wE4mhPwWYCjObiFjoyjCcBRCR1AJhwkuNQ04KcbDnPxXBwwuBOcyM0ENGnhfckBJ2MxMlx1E3ACObLq5OF3B7caJxXrULKoGZJkNi+AzTfnsKfZ8ZiqRfcuPvn3Xf956N5FL2hnP/hEi1bse27FgbefXnGg3ZYli7aqCxdvpgvm72nXVrl/10cfv36/2rbdnnkHPv3kwGNr1z360JYtXMH8Vavmz6l+HnVqKPjNfxk6BejIGot5LAJkAQcS0qw8cCBBatIpbz0qFIQ/JRBSTV5dp5LRFdhZymV18LpmyVb9XAK6BzUL9Yz4dKIJi5BeAkaRU5RGWQKBuJkzcLNO7FByftenmnb6i4Grr4vvu2jwhgOFNZPe+m3W5uULtmVtX/XIK/zuozRXO6md1QZHtfq09DEZKV9/uHzEGOr9cuOxRSUrP/zytG47GCSCQldWD+nQhCYYIEAsYUbSADshlAAvyBCFpRFR8PCzculSwBX83xBbcARhTo7QDWKyhXQiEROgalXCC1ljAEkxh7D8IeH1CljR4AK0ZMOXcYCY0pbGMJOwAq+u28IMfgn/EVydgFf1UZPPT30D+O7RlRMmcGX099F0xhztlxQpRTs9B/fzFN3Af85vYvQl6UjLqlNnZdQZxKCNUPh5iu/TsJvvQzeMG0dXjRunrzkL1nxHX7OokBYV5lBYeRZXOWFCdAk/YMYs6k4GL+CcqT04mvH0ZjCi65nupJFJJJKMPE2xx9CDrSV6SNfRg5uhB4CiSnIIzaU2zUu6C3lKXCOkYElsXBLoCh8PhuKRVYsLHW18CjpaKe4C8OCgviB42Bh4MAWRqzfzdRtq3l00o1dyBc29Y8JdS+bcD1GHtlkmlLy4+9DmxR9PLRwx6oG7byt/Ztq8h5fed279ypVAzwytu/S5+DAJk2vIFhJxYrXCElaLxHolLaR0KlBzHfXK1QWqD35lFqg8Aq++zCRyIOfO0X2sBMlEP70ydNW+s1P11KGnS+m1FzzLGSVpL6lJSu7ZC+swtPGIhZYcsCCVtgWaA3Jvi4WXM3PzOxV2w+KF5FZNbZAJzlz4TId88NVXFwE7EhINdrhJIIPwEsYYI/3s4mauO8xLzJ70D3AkAMd++EQGofobPWiRh/n3GW76Ga2gi+lS2Vr3wcB75MLnyh5Y4vGf2Dhyaj+OD1lvKnr0RZtbU7Sntb9rI2QPnUhvHlLbK733B3dqC7VRXLHr1lG3P9KZFmQM7PigQr+mGzlJS9WGHNb2lQ0fNfqXgxoNFxZx0X0LR515iy6i27R22jxtkdahfbB/u470Nzp11au3T4UMlsvwJ/0M8oCsXvgG4oEJMqH2us0qfJgFhVrJTCi4JQlxQFwBy21UipHAigVMAPdBPsB7AkAo124KlzXr6Wjp07u5G7WvJVE5exN9WhvHUcg9WBzYA+ssZvmhH9Ycb3gHJ3hBFn8y0Av62XLMCwaYyJ3o/kMAJJje2pz1NaLNYwYDgPMpYHagyG0o/slCKlH9TpYioi+ECJuhY3JIxJojvayA7uUDhbGDPfSl76JzJy7aEP2HNo/Oe+HV6jXaRDqoasurivaBqOzZW74hI+HQwv2flK557IGNpcsWP7RMt+WFENs2g22mkrGGZXqAHk8yg+jxgKsYaIgDPBwn4Lk4CxppGiPNBSS4WPVTsYQYDDaF1HQslrhA+4TkYqRClRJRIeM8cMqUoFeNXODVBUj9UZ+4VOp1o4KF/RLEM7KQ
5v72I3V5uPKEd17d88MPe1495C/nPNrP3/+m1XGjT9J4OvqPb6Tte7XDP5z6t3Zk1+vSl+fonehnUD7vg3wsxEM6GtKxxqTjwdDsjdUiFKsLUQHzIz7dfcug+FgzCAB3SU/amSBXq6mNjtDWa79DutXxMPVrP36ufSQq2nNa/evaj1pVKc3/Yfdxms94iesPhfVt5DpjdUtsdQF0Q9RVUeSZKuJGYmk4S9EtgFQUa0jPx40kXE/A9Z89/FMNx7i/R6/hg6JSFj1aFl1fShrXHcXo7q2ve/GaJj3itLamsaDtggX38C801HEHoj1wsbfujt6ur7Uc9OUD0JcMrKmlxfSlFSWpTUhMQ5DJ8uFAK/qCkNMUisQzVYuHNIvZga46aaA6yTKzhwRQHCW5WI2DNNFAmy3Uxyfr6iODMchMg5bTwj9+ohYfNzlp364Dp7T3n3g3S5tNz3XSogc17XVuCMjUQW/9aZe0fLt2/Gvtt+PaVzd3pLPKomevm0mHNfG0nsnyKsOjmHSPoojhWivPuGptkqSN9UcUm15lFljDpFGG2IAJQ64DTK3ge1RUNBwQleit3OazN3FV0RJ9PUi+6M2sBhFoJsPG2gVcDX/ExiseqUT/pH/3FsBmKnzXg3rnaMyNHI25kYVdCpTfHctcWQ5k05Vfz1UcwGsL5CiKu3l+AithZpmTXdj5Fq5843OLNlee3PV+xVS6TKpat32F4Dl38q2fxpXtNcd49jPzjzGeWZp4xtsZz3j0jM7G8ggXwooaUXm7nlFQPaNACsE5+y0U4nQQ2PYW13MxF93ALeIejT7/NrCvhKsSo8XRgMhtiQ421jbB2mIsAuBKBg+lGA8jPNN6XrTEKphMOL49lRwY9dntTfYkdYRryeQ241qmuHAjJbGKJkvsdUaa9AKkKhPGSMUs13BinB0jskmv92F1JcLbHCwKM9ooaoQnhwapySPvWc35JS6xqsIqRb8bHD0u2WA7msiBhjzAzebOakIDjS6Jzm7SzVNMN6+9SDebKyRoo2Dszo7ixt1xLGszG1tSeUtsQ0WootQk76nku0ugowchAJ5Lo8I/z94kHKfnUsG/zgLb//7Cupc5VveyXLHuJdj0uhf4/5ivzSAeNF83+Fssgvlm0Y6UUIF20d7VGs4T7cPK+o8+O3nqHx/9iK4/kY7U1mo/nNS+19bTETTpZ+1bmn7q1AmaoX17QsfvyJu/sfqFh/Rp7g3B/9dabEwHLS1DgS2E0cCJBV4jGqgem9wy8AYDibQp1v7+r3Pn/qUtoHNqt9du1xaISv3efT9G13H7X1n28Gv6Pmadby86gFcesOebSURGXvljvEpDXrVhG/DCBrwuNcngVRBLE17Muh2yjbWjZEiMABXIumalyaBOzVjo5Ux+UxbDaZdg5MTSs4O1P7s/cP0lubleOzP4RP8zqakXs5Qju4CfH4nbALsHSamhbS5d29QgsDQxmbE0EVmayShKAoqSQ0qSnvmlM/SuiCE1C9UgSTfzOFmRgapEomMd5uqV4EVYB6BBvN8Hfp41jZqJYBc9+e+zD85YXJGRNSMrbcsqbSy9++CO7a9oD4nb3j847ZXcNtsWLu07oU1C5oJrFz24KjqJ+3PN4sdXge1gLl8JculAyluv/2GTUU2BUJYi47mUhJYdxvbNOoytNBTN7bGmZ5ODLK/FJmKNw5fVvtUWYmY45AdCfaaWLUQhKKG7HcNN0jZv+Sxy9NQf1HP4nw89yE/6UN12cMc3P/2ufXf0i7VVdIX08voVsyue6dZj77rqT2ZP3yqK0vJdz02b9GTXHu9Vb/2AThp3SEJ/0QFk+BjDx2C1UvN6icKHWEor1aHuR0RWmRUBFEQk1naVsILXlBFiL6CDUKLZKrFScnaHeAPzR9Ws14b+skjPhlTJ8L2KtdFd8lgkdOHFWPUD3SWkLljsZaVwiDONAQfLGtWVX6m1xyq0o//+QTtGP+O/bMja+e6h1/H3zw1R3Q8i7v+Q4Z6AUakkHBs1QKzDAI1KLLGiT5j6w0WI9zMW0B2pkJ9uXxD95xTwcdeOHi3shFBKSTH4fewD+EitXuNRnGF2yQjFAACXjWekUEjVqUuNww4hyl7P4t7485erWVufuBTfXofe/9m5r+rkcaOUmO9Q5L2q2XdGVEzwxuyfb8FqIsSQGpfs9ORF4LVZQbGGM7tklv3t4Exmp0v2NXXlKaxthGziQ8fKvDiQmE6RRP9VFAmlOUETDRbPpJb2UhHtPIV2LpQKqGmG9tAU7bVsKUvbMRXIP/EN/VbwnjvxT/wFvv6OZ589t07nb3fgr8LiTLZh+eYwKwYbcUbPpjiMI4KVxREL1f8PWmh3elpLfoI+S1c9oaXQ049pt2m3c8e4D6LLuUnRUDSNWxCdA2sEYI2dsIYZEbupUYY8LGApUEx1DKFbEambWPQCivUDpBfWooirltG9dP+y6MkKUWn4nG/XMCZ6gkvWaYDEQBjPdCQ/FstjeJXn65sUxaRXqAE0G425cCENYBEk4LuTH9bwBv9xwzp+9gjh57K/noszcMI67W16UpoHdlXIKimA7LGSQvlYnajW5CV2IQ9RDphX7C8+FDMpgB5BOexbR2/45BPtbdOrZWe8ZXDdjucf4MVYP4q07EeBkIMd7+NG3ScqZz6FzxLYQ3+2h15EMRXoRl2A2J/twVQHy9VK+sKSS6VghRTs3RXbjClW8fFB+AcEHfj0U9pf2/6JdKLsz+uxvsQd4RoY/xp7YwbLYC8sfQYt4wfQvGE0d9qBNCntDfjC59F29Pi4cVqKzid6fhU/lWXQSc2wGR40IywM7oXyUxoeK2XfuUPYSfeLB4hA2hC9AcELxIWdRZFxFnLyOAG0Qt9IUdgTvINbeeg+cY+o/YHx927AxG8LAyFq5ZMTemarJIUjAVw9xwoZLhbizBDA+PYBD+JSLNIUMPPGgm2mS7Ghp2cTAECvG09hDTcipOaGQiFI0zGtVzsatn/tb/2Z7SfnC0rqXlFNij8jKAl7d+799XcLs/IEV01iQpInT0l11aSkJoO5w59N5h6Bc8zqExJTUmM1n8SURnvPtLNBFTUNgEnEE8hhzTI+AJbnx1zJLEdszni9xNM5s3usQVYAJt+5iFXAwL36IZAWNp85KITP3E35r0499eDsFydxk6Ztr/nC7pwdZ+3x9uyqbRXTx89/s/1/1u2nGU/XPjht4ZzhVJKkqcNG7Xg5eqJ4QmHRTe1uK9+4dMjk6SOPLWOYZzXEAUlKAE1JJ6MN7GVHhvsA+EjI8BQ8YH01iWJczWAMd+uJgOyqV9wuNQHnwPTujOpG2OPSywh2JDkF3Z2LN0CrzDoNst4zyTF5jPowIiDJtLqyy8Zp+7/66o2KzYV2ue2a+1dXPb969rNZUkK0cvhd2jta1Peb9s2dQ9fRjJGTfzzg+5Dys0Yz3RsNuvMO051RRNeYeNDX+ECsSBkRkBYnYAQnS3edNqRFRz8eoMXjUhNBL+JCaqqM5V0GfRKxACIEWHEuHg7NqcYEjbslDEDMg4Ew7Pf6vCbIvbjRv34Zuf9
ebvy2uVurNygVO8ZxlbPXH/0PZ849QTveU7ZOEqUFq878PXfvn0umS5L4aEkpLWDymAx0fGrI404dr+vhGeUhxOQhMHkI5pbyMARhsoGux6SR4EYSnKBvVhmU0ZBGnMko6rBCImYROc0L9LKepU/+8sCUDUUV46xdXr5335eVq6umrcpr9/T0qjX0vI/ytGjUEG7BmR9X3z6CBn478OPYEbRh5H1a9ENGxwig4yOQRzzQMYxEvEiCXTJISMWqm8UrxKpuGc1LPIlG+oO7T7QirLZ7/Swtk1WXjLKw2FGhZEMWhE0rBXz61rH+2YZ4/AHdnEZQ2+63jkeFfVXlVV3DPV+f/67223yOm7Hh0UW1NFr0Iw01fFKW+sofvbrd0rs/bU8nimmP7H4X9KkPEFEjdSB+ciuJxDOrwPgjWQAk4WykHFaJCGoDWCyhQIlnExo+rJWEmk0URuJ9TP8QkSVixJLQJVjYvsN6W6ixAacjtT41654M9A06E8JtSsZSTtMq+cMlVesiVstdkmlWeVVJQ1v+MNMTrT9fB/xNJXlkmlEFDIBmmGFzOpPbmpkb9GIVtT1jcBrsL83FsE9mKMZuNl1WoHYAbqcR3XL9co0g25ONyToTcDwZ0htA/2pbe/OKIFOeIr3a0HqnJ6ZIRw/eu7HIUfrDBwOVPum9H7256oWijeX7j1Y+DyqVm/PM9Kq1hkqVjthy7h8f/5odKM0I7Fi75JahtM2v++vH3UH/GFmpNXygx6YqCEtfgI14yAAD41jDuq9yoq9yNvkqb6N9cyE0cZvhp7CCYvMw1ACmTQy8GfNO4HmD+kyHSa6q7FJbuemVymUzZr6YA27ontET/vFNtJRbrTw7f3xUYrq+BTaVCfthc76x/BWVBAOl0KIB5dQbUM7GBhQsiQ2oLRUVFUK3c2+K5Rs34jXPP6L1p3lwTSdQ2ZUwsaI0BQvAFZdCMc5hT99VoMp2PTMG2ODSpeoOGfVRXpdJrCKUje2Te+2urr6hYyqefzStkAoV2shS0TqzUnjy3MTq7VZTeqxHtQZ4jHNljlhdFOtCIs6X8XYiYvA11Ud4OyvNMFZfuj4ktlofWlM5hy5/mNMG0a/5pVr/h6SEhpH0gKglRF8VOWf0P7CHJr6mkEbo0XppbUuFlHDmR/jOCsgH5oJdZGGuyHCLKwXrQGgWqCJKXBjtRPGB4Wazi2Xp2pHlYkUPVuJng6hY+lRzcDJE1w8lVQZ1UVLQgBVZVuN86IsCLSoyfqY+/guUyNtcoVaMt3XeUjmrOrPT9gVbdlU+MmfZCjed/tjsuU+lCd1q7hxbOXPq/O//E13KTX/7xa1LTElStIKbfuCl+ROj5pjuHwH6Wuh+I3VoAJfXeo9BjE2+SPf9F+n+OFtndbryauWyeXPWBIVufx8z8fPj0Ync8p0rF02K2pnu48xmAuznorkq+v83V8X8OEllXWNS1KIsAhjm8BEqaecOf6Gdrdz9cvWevRs37ubiAqdwsupU4BftQ9rpl13ncZoq8Bo6TaOes1obJYiwN4ylQ4kBa6T6ZuyCWApJQCwAybrtcC5WJGyOaWRO5xpgGrt0AabxGJxrxDSJtCWmKXV22cRAzdRNXdqtmrZ63fqq6c9ka6PELzYOK4lhmttvin7IbRtadmK/7wMq3DtC9/Gj+A+M/d9pZOm4/yYfnwKZg63gAgwA4kaY29K/IxW2RixglplbbwULFGGJs3UsMLm6S9zYiqINkxgWKH+2fbtn7m3EAnfcvuZsNpc/6FbEAj+V/pVzD52infsw5q+554EOF+RcTd5R76vHxYGKyI2tBsizcNrHjf4jjsTuWQAO+3TLMuUwxbzHWVA10Z/ncA2d8kS60K02bky5SSiX5k6O+mC9SYA9VsN6Hci8S9SL6GXrRaT1epHPD7gKC0YOI+80p8vuWjFODuI0mJIlKwmx+hFx+BpH0HUXHBtBb71+xMr1RZ0Bz5vUygVPz16377WPN78yvoyb/My8Bx6Y8tIbe7+sfbN8PKXtpPvGTb35xqmZuQ/NmbVp2O3zAd4PXTjlxv4lWXlPzVtcPXLoDInxPPv8T9wUcRDgl9tIxIM8iItBF1GHLqbm0CXWYYpvHC6Nt7SELtgMRHBAZMWpAxhZnwdrhruyC+Xs16f//POA3qlFme602/OmzgX4Qn3aTyXRq8YNFaWhdsfjz3FvwP5Wgow+F7rpfgwtUy+3SmZjk1iE8l5QhFLsrDDJ/BirQ8msKoklFSqx2kqzqlRRI6rNXlm5eNaStRmV46ydlcpN++hb3L3RZW9unjGe5869qd55N8aN9uBX98N+mtWl6JXrUu1n0dyglE2zZ2mlo4RuDZ/NncvnnXsTvno1IeIBuJ6PfGPMHjmcEIfwojXUhH2GVktT3sbS1L6bfj7dSmnqtxPvtihNWUS9NNXzvVND9XmEOEiD94qKHSead+7bd/IelsuaXDVmkwVy2cbSFfzZLJeFc5jLbufMFptew4J8treVM8HfjmaVLCO51YtYBjc8wI3Yq1FcCF4961A7Kfz93d93ljocnKUdLPulQOp44m6hWzTrjTe4L6NZb77JfXnuTe74669HU4ArIeB/LfCrZd2K/nd1qxCdqz3xCA3SrEe1J+ich7X3tPe4HM6jXUt3Rk9Gj9D3tTCsEQTMfIjJxJiVh2tjh9UeVmVEyfEFyHwgTW4uaJAz0yID4F5Fg4tou2yJXveglpv74HxfD4cjrjBu4MhAMSjAT/P5p88lTlppEcdw4uS/Lme2iDc3bGG61aKehU6IN/139axh3MPRJbwzOoXbM4SfeffQhoVGPauvNoFbKfUkaeRGAuZc63eQRCGPzQhBbLMU1JrZCTajk8wwKHYvIM3NYJT6gZ8ebPpTGY3b4lZFux4OWABjdo23gsQK+ya9rt/3/imrXkmae9/wO+4YXjEv9ZVVU7j0sQ/OPL7pVNGgdoceOz5pbVbOuonHHjuYe1PRyZePzVjK9hrRfqV+ViNLIS1bpa569mOUy8ByI6Xar9LuM33Y9yxA450xGtMKaolOo79AjQcaHQW1ziYa+TrFqvep3QaNfhIbbIjHqKc43KrVzWjsRRmJOkkoXpbH+1g+L5kscytH3nXXyPvmJu14rryionzVK9qu3IOPHStfmxlcO+X44++0G1R0atPxGYvHLp1x7OWTRbo8HqPVQj3vIYnkJoLo3GKtR73iUb+SGLHGXWnM3IHmZCyuJyKIZJNQFuylk0S2W1XywG8eQrTdmCbEEKjHE7+edLHk0fdY1cy/Pjn0qvHFAyaUrJ0+5IkhvSd2HXQP/eKBHTfcWByeV+Kcv+u6QV0Kp4/R9zjjvI3/TswmQTJDr5UoaWE1XqyPBJj7D2QY5RK8OcEJpwWWUQniRRWTDL1vns6yGoyWRgklSa5HKWAJJT0D6MEyl15CqbHaEpP1yFjY2d3yfqymKko8uyUrm5vxwd8rq97l+cYyynhO+MdTlbvf58y5R2hOwldfyu+tblZIWbrP/d1xP80BGvH+wo7sXqJn9fuI1FRIlxJDEQnTeAdfX0toim
TPU9xhVn/1hmpsKZIZKAyy+1Nk7DwzdMATnLfgUyzoOxUfYoM2QHCbAoULs5QfFC0ePh3fhgVML346Ppl9Wkfe7no1E6ck0KoTEXmrksMAvWGeybTxjjScKQbJmnBmPtyLFuZc867tH5HXd/F8+dLK2U/Y6D7talM4n6cNg63XXmviFpTRtu/Vf7hV+ttSZY12uEwZv693aanz+0ol1kNaDvYWjxUCR7M6fa1LdhA7G4BzIYIM1Xp97ARAAy+vQwM/wiGkzc7GHSN2NppgtwFhUijiYJmfwwV/eUMMKtsdsVq/r0WtH0jx6bUNcGX4r8MyWk03LtOK6b3acPqiNrxCv8GQThWVaAfu06hctq1M20mvhV86jl8revgs437XHiTWNVeJnWEWvS/WOOeJVeYErNizRjqWzOGvxn5YGBnrW7uVtt0ielbDf1jhHn/+J/EP8QDEHj8g1FV6/FedDmPa0QcHmQwx4gGrvGWCidSG8yyZkAiH4WxemN3wWIAW0oXtIs5F8vTRxwT9Zj2lrUvN18dqO8Jf6SGlowtxbq3EPqkW4e19bWX3DovTx2emhPXx7TzZvV2Kc6eTjrrR6C1kvQnf7NiYMW7NksBLjKdVtC3NoVXaaO0L7bBWchudSAVK6WRtuaZpDdqTNGnHM09uELjhk8ZNmjVz8vgJwznhxSef2cEdod2pot2kHdQOaANphPbQ6rW5dD71Ux/E3PnatorNn1c9JU2ZVD2/cuGLE6ZJT1d9xmQ2k6zle/ObiASZIU65YqA2fs2kOfdoJ6j3HkfsgEv10JnaTG0WnWkcXHB/EWlx9xCoNSkDmf1qyCxEuuNM50VSqwWQgPPNeNdlJyahToD0lbah2sTu7I3ExvstL5BXCCQUDikhFxNLu/YA/FPBVwfbhkJKagux4S2YRSHIA1BsGXh7oTsV9D8HhNcJpwKDxUpYrgUREnxT6Y43GFxGjpfoo+fRRBq7naTMkOYakOYRXZqTIAPj6CQmzai2HKTLPVn1l759e5gtZVbhxqG7tg8aP+Le568kzehA/pY5M/relZY4rn/Xtn18Lt/NuV1uvUF7ju65+frb9L7xNGEXPSK+CRJor1tiLblEj0flMfByen6fTMN+ftqHT/Jn4PtWSWvAa5VoA+hKuKoTpz5MDP7H1SvOWIBnd6uY6motumgsLpU37s5m96dIRL8P2CTrFVU9ySoKG/OWJcNmDh6bekfcoNFVT2qrenYv7mCe29syaPDwiUw/F4B+DojpZxE6Kh/Dk/BrAfVqJ+6hOdqRTxqP1tKFdJG2yKMtajzQ50vZHKspnc2xui47ySoX6Gltq5OsvAf4c9E4axEyrPlMKyU68/SZmaGwLq56xclF+UqTi+6LJhcpbqjZ+GL0XX0vxhCj5DOkiLw8BC8FsBeBmEkWiYgYaSQG7ywFiljHCj7YDjaLLKE31MFGAecdwqveUWlc7sxPxoAcr88tmTqzulIG6dnq5FKgtcpSm9g90YKN3RN9heElRuelJ5joZNzgFeeYuC90dgjGvpONe7+DpKyVnWNJLCOspkL8CoRikMogIwVcS7oewdIZwKoN6n8Fm0hEXJWRjiTKCbYrkxiLepemcjbGwysSyeezgMnpsyMgbxmQRffWpkf8rU2PJBhZe8Tp9hUXtz5BwqTRcozkLRTARcMkYodG/eON/YA/gMwukZRcvCMcZ4kPqx5gOD4dIqn59tCX+3QW+9ica22i/ldi09YRo8djrcwpXWLjMR632PtnyNaLtz4/hjtYv1v8GvQbrI/8j37Xl+IP6zO6mdb6iKux490uzRXreHdi2w/A9gMXd7wDLtxtREjKwY435nq+kBq6oOOdkC8oSXtF1Y8db1+zjrfPVRPv8+uPpEhMSvBgB8vfrEoA51jH2xefmKR3vP0J8YmNHe+A0fFOtgFscaVltu+AsEXxymp+AWt+411C3mSj+W33tNL8zr5s55uFkWbtb6m+ttX29x9MaZp64NP3tNYA52+OKRGv9ytBFtivzCQjrtSxzGqtY5ltdCy3Y8cyI/i/7VkyIi/XuDzHqLtk95K+0sw3PwuBVhPfbumb6X/lm5/VfbOwm13uXB/sT5HYcxoSxKMX+uYWVf/L+2bjeRVXKPwzb9B69Z+2ZX75cj0AbkPMJ+v7PdDok8c223EqeohAGO9tUjJCzQj4v/HKlyYu5jFap68L88iXJe+s7kbw/jespYKMPSQB51YvUU1NvEQ1NSnml2WvHwzyv6qoMslcWFa9k6nlRcVV/iddDryxT5x594MkFly4Ux+KIhEyUDuO6TRtPCW28RovT/A24cYEr4mKmuQ4C7yVoL+VUFCbrOd92GdKwCKXLOm3J1yRtJhcLqBuIvPlFxEn9GZSiMX9UUzHAiSHXN8qYmnbmlW0M6xiByKWNsFsfYRYzcy64uQ18xTBInilwUtH91/qFvG/l/1KzU9w2uEpVw7zNiqCvCQq6E7EsB/JcjFtLSz+8rShxbdC26XtozltrdvISy3puqyxfN6Sphhm6A+YwU9ScSb/YhST1hqKSTesZTugmITEFKQnTlaTki8HaAwqWuKa61vs/mKUMLL5jpntCFbxNMHKYjr2dC5h5RmXsPKAse9asPKkNGPbDtz25c2huRguMIlvW1JwsW2ktGA6Jc8Lx7l3xTqIRHns2Scie76YLOjBCJJH0UvMYLTWWKlfv3eosCgMiXCO6fnvSr4vr94gHPcd/dbNxiTA920SltKz4iesDnAjwYK3XgxWfAW1vJFGJsQy/CQ9wzfSd3wmDoZudxz4BwuPrPBByg6JZVO11dfsKUh6dN5017V9S0b3u65kYGF2VjiclV0otu83Gk6MGHFdTudw27aFXZDWMuEUdx5ipAd3BdhMEtmwBi/G+vO1Hj2t9TAx1Vr1cgJrbeHUGc9G59i8EClWeZeRM+q7aioAI2gqmzD46vWF+X1umnTLDSu7FPQW6e33Tbq+yDtk2qRru1y+jvK/f+9FbqvwHST7PPCddRv4en2ItmnqFb7yotCL21qG87FLuK3i3it+fonY1fj8cCFEZfZco8Zn1MSeakTY4Dt7Ro2o3x7Dvu0J877hk6+7SghtpV21t7fq+7zMdS7zrJvhV1VMhi923FGjvW9c53wHKlH+v76Onz3+bnjnijGfUut7+zS8LwP2wpmNZ+z1YRZw0RP2dNoU0cUqKDbjLiCDTEWS2egGu+k0RnK4kfB5zYg3WKCvab/8msYt7bHH+RlrGqRgeUUqVqzslqiWz/ZDJm1vxiiDXTgT0oX+Qd3/V2vqrDTWDFeO2di5cswhmrN9m/YpfAde0Z/jPS93s+cJYSWmn1EREczhMD4KQBUtoVCzpwvFxZ4uZJSJ8UkHism4w87beBegAQXwZ9dSKi8l55euZ//pOjGBrKUNrIYUIFQxxVyYTZ8XN8cEJ+jCYrXPCReVPOE6pXCd31teR+FCxqWarkPxOkapqrSVyhTb002Asd4TD4KHhXwyBwnOMB6dptjCqszjhGItoTlWO8Na2PpIxmcpshP4GEUeM8YaR44VeyHtC
5TcOpWTsP4JMvImABdTc7F+lIodjvhQJJc9zSWXWLAThLVRlGOHZg9pseNDWuzGQ1p+nfzGNL197WAPabFjr3rn6bq951j6aXPVxEFamKe4XDVOlwPST/izWfoJ5zD9hICGqactzulq1o/OYNVWfbQyiOOV5ILxSvavecbVk9700ksvUedXxZN7W7pM6br5bS4YPYo/724qLu9s6XJf96+0U5yvbGNZ1mkadDnHuTw/vpUDf3rePCHLY50u2uZ3jx6HRvHPCNew+3X8pFKvjELOh0+w1MMR3/iAL3zWjtnpgfScRSapzng+W+t38qArAA2o9evRy+/C2bpaZ1P0ciG6tdoNPBVgD+iB7M0D/+Aohw/yJnkUnbfiBtpx5CZp65C/SM+HX5TE8f36ae3pP7T2XKI2lFZHf6BzqTaPPka1qUyPEPh1Zc/UIJ3kgIzH597+f+LPPhMAAHjaY2BkYGAAYqY1CuLx/DZfGeQ5GEDgHDPraRj9v/efIdsr9gQgl4OBCSQKAP2qCgwAAAB42mNgZGDgSPq7Fkgy/O/9f4rtFQNQBAUsBACcywcFAHjaNZJNSFRRGIafc853Z2rTohZu+lGiAknINv1trKZFP0ZWmxorNf8ycVqMkDpQlJQLIxCCEjWzRCmScBEExmyCpEXRrqBlizLJKGpr771Ni4f3fOec7573e7l+kcwKwP0s8ZYxf4Qr9of9luNytECXLZJ19eT9VQb9IKtDC+usn8NugBP+ENXuK1OhivX2mJvqmRM50S4OiBlxV9SKZnHKzTLsntNhZdrr445tohAmqEsfpdeWKbffFKMK+qMaijYiRlX3MBRNU/SVfLQ2jkdrtb+DYmpJZzOiiYL9kp6nEGXk4Z3eeklVdJYpW6I8Xcku+8Ie+0SFzXPOfeNh2MI2KeEktSGP8wc5Y7W0WZ5ReWqU5mwD9f4B+6xb6zxj7j1P3eflW+E79+N1ukyzaV9kkz71+Beq19Dlp9msejgssDW1ir3S7WKjOO0fkXGvmJWujHq5HWdvWc0/pNxfUxWKTKRauBgm6YszTnXQ6mvI615TGOdaktNIksebePYEzZrMG88g326eeyVfMcMxSU6qk3uxt0uMy8OTUKA1PIN0g/Ioqe/W//BB7P4Hi9IeabvO5Ok/0Q0mU9cZcJ36T2IayfpmcUHU6a0K5uI+30inaIm/adUcsx802E74C0holcIAAAB42mNgYNCBwjCGPsYCxj9MM5iNmMOYW5g3sXCx+LAUsPSxrGM5xirE6sC6hM2ErYFdjL2NfR+HA8cWjjucPJwqnG6ccZzHuPq4DnHrcE/ivsTDx+PCs4PnAy8fbxDvBN5tfGx8TnxT+G7w2/AvEZAT8BPoEtgkaCWYIzhH8JTgNyEeIRuhOKEKoRnCQcLbRKRE6kTuieqJrhH9IiYnFie2QGyXuJZ4kfgBCQWJFok9knaSfZLXJP9JTZM6Ic0ibSTdIb1E+peMDxDuk3WQXSJ7Ra5OboHcOvks+Qny5+Q/KegplCjMU/ilmKO4RUlA6Zqyk3KO8hEVE5UOlW+qKarn1NTUOtQ2qf1Td8EBg9QT1PPU29TnqR9Sf6bBoeGkUaOxTeODxgdNEU0rIPymFaeVBQDd1FqqAAAAAQAAAKEARAAFAAAAAAACAAEAAgAWAAABAAFRAAAAAHjadVLLSsNQED1Jq9IaRYuULoMLV22aVhGJIBVfWIoLLRbETfqyxT4kjYh7P8OvcVV/QvwUT26mNSlKuJMzcydnzswEQAZfSEBLpgAc8YRYg0EvxDrSqApOwEZdcBI5vAleQh7vgpcZnwpeQQXfglMwNFPwKra0vGADO1pF8Bruta7gddS1D8EbMPSs4E2k9W3BGeT0Gc8UWf1U8Cds/Q7nGGMEHybacPl2iVqMPeEVHvp4QE/dXjA2pjdAh16ZPZZorxlr8vg8tXn2LNdhZjTDjOQ4wmLj4N+cW9byMKEfaDRZ0eKxVe092sO5kt0YRyHCEefuk81UPfpkdtlzB0O+PTwyNkZ3oVMr5sVvgikNccIqnuL1aV2lM6wZaPcZD7QHelqMjOh3WNXEM3Fb5QRaemqqx5y6y7zQi3+TZ2RxHmWqsFWXPr90UOTzoh6LPL9cFvM96i5SeZRzwkgNl+zhDFe4oS0I5997/W9PDXI1ObvZn1RSHA3ptMpeBypq0wb7drivfdoy8XyDP0JQfA542m3Ou0+TcRTG8e+hpTcol9JSoCqKIiqI71taCqJCtS3ekIsWARVoUmxrgDaFd2hiTEx0AXVkZ1Q3Edlw0cHEwcEBBv1XlNLfAAnP8slzknNyKGM//56R5Kisg5SJCRNmyrFgxYYdBxVU4qSKamqoxUUdbjzU46WBRprwcYzjnKCZk5yihdOcoZWztHGO81ygnQ4u0sklNHT8dBEgSDcheujlMn1c4SrX6GeAMNe5QYQoMQa5yS1uc4e7DHGPYUYYZYz7PCDOOA+ZYJIpHvGYJ0wzwywJMfOK16zxjlXeSzkrvOUvH/jBHD/5RYrfpMmQY5kCz3nBS7GIVWxiZ4c/7IpDKqRSnFIl1VIjteKSOnGLR+rFyyc2+MIW3/jMJt/5KA1s81UapYk34rOk5gu5tG41FjOapkVKhjVlxDmcNhZTibyxMJ8wlp3ZQy1+qBkHW3Hfv3dQqSv9yi5lQBlUditDyh5lrzJcUld3dd3xNJMy8nPJxFK6NPLHSgZj5qiRzxZLdO+P/+/adfZ42j3OKRLCQBAF0Bkm+0JWE0Ex6LkCksTEUKikiuIGWCwYcHABOEQHReE5BYcJHWjG9fst/n/w/gj8zGpwlk3H+aXtKks1M4jbGvIVHod2ApZaNwyELEGoBRiyvItipL4wEcaUYMnyyUy+ZWQbn9ab4CDsF8FFODeCh3CvBB/hnQgBwq8IISL4V40RofyBQ0TTUkwj7OhEtUMmyHSjGSOTuWY2rI32PdNJPiQZL3TSQq4+STRSagAAAAFR3VVMAAA=) format('woff'); -} \ No newline at end of file diff --git a/plugins/UiConfig/media/css/button.css b/plugins/UiConfig/media/css/button.css deleted file mode 100644 index 9f46d478..00000000 --- a/plugins/UiConfig/media/css/button.css +++ /dev/null @@ -1,12 +0,0 @@ -/* Button */ -.button { - background-color: #FFDC00; color: black; padding: 10px 20px; display: inline-block; background-position: left center; - border-radius: 2px; border-bottom: 2px solid #E8BE29; transition: all 0.5s ease-out; text-decoration: none; -} -.button:hover { border-color: 
white; border-bottom: 2px solid #BD960C; transition: none ; background-color: #FDEB07 } -.button:active { position: relative; top: 1px } -.button.loading { - color: rgba(0,0,0,0); background: #999 url(../img/loading.gif) no-repeat center center; - transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666 -} -.button.disabled { color: #DDD; background-color: #999; pointer-events: none; border-bottom: 2px solid #666 } \ No newline at end of file diff --git a/plugins/UiConfig/media/css/fonts.css b/plugins/UiConfig/media/css/fonts.css deleted file mode 100644 index f5576c5a..00000000 --- a/plugins/UiConfig/media/css/fonts.css +++ /dev/null @@ -1,30 +0,0 @@ -/* Base64 encoder: http://www.motobit.com/util/base64-decoder-encoder.asp */ -/* Generated by Font Squirrel (http://www.fontsquirrel.com) on January 21, 2015 */ - - -@font-face { - font-family: 'Roboto'; - font-style: normal; - font-weight: 400; - src: - local('Roboto'), - url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAGfcABIAAAAAx5wAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHREVGAAABlAAAAEcAAABYB30Hd0dQT1MAAAHcAAAH8AAAFLywggk9R1NVQgAACcwAAACmAAABFMK7zVBPUy8yAAAKdAAAAFYAAABgoKexpmNtYXAAAArMAAADZAAABnjIFMucY3Z0IAAADjAAAABMAAAATCRBBuVmcGdtAAAOfAAAATsAAAG8Z/Rcq2dhc3AAAA+4AAAADAAAAAwACAATZ2x5ZgAAD8QAAE7fAACZfgdaOmpoZG14AABepAAAAJoAAAGo8AnZfGhlYWQAAF9AAAAANgAAADb4RqsOaGhlYQAAX3gAAAAgAAAAJAq6BzxobXR4AABfmAAAA4cAAAZwzpCM0GxvY2EAAGMgAAADKQAAAzowggjbbWF4cAAAZkwAAAAgAAAAIAPMAvluYW1lAABmbAAAAJkAAAEQEG8sqXBvc3QAAGcIAAAAEwAAACD/bQBkcHJlcAAAZxwAAAC9AAAA23Sgj+x4AQXBsQFBMQAFwHvRZg0bgEpnDXukA4AWYBvqv9O/E1RAUQ3NxcJSNM3A2lpsbcXBQZydxdVdPH3Fz1/RZSyZ5Ss9lqEL+AB4AWSOA4ydQRgAZ7a2bdu2bdu2bduI07hubF2s2gxqxbX+p7anzO5nIZCfkawkZ8/eA0dSfsa65QupPWf5rAU0Xzht5WI6kxMgihAy2GawQwY7BzkXzFq+mPLZJSAkO0NyVuEchXPXzjMfTU3eEJqGpv4IV0LrMD70DITBYWTcyh0Wh6LhdEgLR8O5UD3+U0wNP+I0/cv4OIvjvRlpHZ+SYvx/0uKd2YlP+t+TJHnBuWz/XPKmJP97x2f4U5MsTpC8+Efi6iSn46Qi58KVhP73kQ3kpgAlqEUd6lKP+jShKS1oSVva04FOdKYf/RnIMIYzgtGMZxLnucAlLnON69zkNne4yz3u84CHPOIxT3jKM17wkle85g0f+cwXvvKN3/whEjWYx7zms4CFLGIxS1jKMpazvBWsaCUrW8WqVrO6DW1vRzvb1e72so/97O8ABzrIwQ5xqMMd6WinOcNZrnCVq13jWte70e3udLd73edBD3nEox7zuCc8iZSIqiKjo9cExlKYbdEZclKIknQjRik9xkmSNHEc/9fY01Nr27Zt27Zt294HZ9u2bWttjGc1OHXc70Wt+tQb9fl2dkZmRuTUdBL5ExrDewn1Mq6YsX+YYkWOU23sksZYFqe7WqaGWapYtXfEp90vh3pH2dlViVSvy7kkRSnM9lH5BXZ8pBn+l7XcKrOvhzbaTm2xe8RZOy1uwak2imNvGn0TyD9qT5MvZ+9pMD2HUfsWy2QlhntyQyXYV+KW3CWVU/s0mJEba4Y9SZcv6HI3Xd6hy9t6yr6jYlfOOSpMVSlSVdVcC51jIVX5Df2ffCT5OLIN1FCt1JVZY9vnjME4TKBDgprStxk9W6ig0lXQmSfXWcC4CGv5vh4bsZn5LuzBf9g7VD4rKBcVbKBq+vPUmEod7Ig6WZo6owu6oR8GYIilaqglawT+w/xm3EruMWo8iW+p8x2+xw/4ET9hHzKom4ksnMN5XMBFXKJONnKQizz4YZbmCA5CEGqpThjCEYFIS3aiEG0DnRg74sQyxjHGMyYw+jjjIj8KojCKojhKojTKojwqojKqorE/z+nO2BO9MUb5nXGYgMn0nYrpmInZmIuF3GMLdtB7J713830v/mvJctXYflBTO6Vmlq4Wdljpdpj/4g/OOEzAPEt3FpBbhLV8X4+N2Mx8F/bgP5yLp9LTVMqgytdU+ZoqTzvjMAELmC/CZuzCHvyHffGqaZlqgmSkIBVpluk0xiRMwTTMwCzMYb20IuRTLDpZsjqjC7phAP6Dm/EI64/icTyBS+SykYNc5PEOfHCRHwVRGEVRHCVRGmVRHhVRGVU56yi/wiSFq6y261m9r1/kMOulwRqmUfQtyt3S1Rld0A0D8B/cjEvIRg5ykccb9cFFfhREYRRFcZREaZRFeVREZVTlbLT68emHkREchKA7eqI3a2Hy2Xq5eAxPgndPvgmSkYJUpLG/MSZhCqZhBmZhDuuuuqu0eqE3+tlqDbLd8jOarXYEByHojp7ojcG22xmK4RiJ0ZwJCe/NrRSxN/pFFVdhyb60bMuyzXbJXrNVlq04e8TuVVBhp0VYsn0S5P6T3nhKrpKCrp9qP1gan7daSjD1/znsjDdmSMpvWQGrZAMyL3Nbwu5Qonx2j70vH+MzZCqKrD1nhe0/ds522Xbzkdlnx6+5e0pgd7x9bdaW2Vv2qf9pyeb4M+x7xj6WpHz6u0gEYRevq7vQjvtftzNXs5aNxvqbsNS/XcmmBmHfev8pgvEFlML3OHh1nfG4nRVhaVc+EwL+XnZek0m3k3Y341tKUpLttxNy5dq9ircaImsp9rnt432+ZB+y70rwVqlsGd7sB2wQWbwvwo56K6fpefU+3n7Fw8teH3ZehL2hGwrLvrGddvL6ftLfzb23f0E3FHa
zgguvny2+Mj8XsJ721786zgWE/Q8XFfh3uJB8lq6AsA3IuDLbF7Dq7Q8i6907+Ky4q7133XyzN34gr4t9aU9fsz5QwUWIGiiCR4rlceTjCZHLE6oKqqIwVVd9RauxWpLroE4qoi48xdWdp4T6qL9KaiBPWQ3lKafhGqny2srzB6PljBAAAEbh9+U6QJyybXPPWLJt27bdmK8SLpPtsd/zr/dcdaRzuX3weR9dvqmfrnUrfz1hoBxMsVIeNjioHk+81YkvvurBH3/1Ekig+ggmWP2EEaYBIojQIFFEaYgYYjRMHHEaIYEEjZJEisZII03LZJChFbLI0iqFFGqNYoq1Timl2qCccm1SSaW2qKZa29RSqx3qqdcujTRqj2aatU8rvTpgiCEdMcKIjhljTCdMMKlTplnRuZAJ87LVl/yp7D78f4KMZCjjr5kYyEKmMvuoDGWu19rpAlV6GACA8Lf19Xp/uf89XyA0hH1uM0wcJ5HGydnNxdVdTm80YAKznTm4GLGJrPgTxr9+h9F3+Bf8L47foQzSeKRSixbJMnkSverlDibRndmS3FmD9KnKIK9EbXrWI4U55Fmc0KJ7qDDvBUtLii3rOU3W6ZVuuFpDd39TO7dYekVhRi/sUvGPVHbSys0Y+ggXFJDmjbSPzVqlk8bV2V3Ogl4QocQUrEM9VnQOGMJ49FMU79z28lXnNcZgFbzF8Yf+6UVu4TnPf8vZIrdP7kzqZCd6CF4sqUIvzys9f/cam9eY9oKFOpUzW5/Vkip1L9bg7BC6O6agQJOKr2BysQi7vSdc5EV5eAFNizNiBAEYhb/3T+ykje1U08RsYtu2c5X4Nrv3Wo+a54eAErb4Qg+nH08UUUfe4vJCE21Lk1tN9K0tLzbhbmyuNTECySQCj81jx+M8j0X+w+31KU1Z7Hp4Pn9gIItuFocAwyEPkIdk0SD3p4wyWpjhCAGiCFGAIUz7OghSo4I8/ehXf/pH5KlcFWpUE3nBr8/jPGIYi5GmJmjiGCsIMZcC7Q8igwAAeAE1xTcBwlAABuEvvYhI0cDGxJYxqHg2mNhZ6RawggOE0Ntf7iTpMlrJyDbZhKj9OjkLMWL/XNSPuX6BHoZxHMx43HJ3QrGJdaIjpNPspNOJn5pGDpMAAHgBhdIDsCRJFIXhcxpjm7U5tm3bCK5tKzS2bdu2bdszNbb5mHveZq1CeyO+/tu3u6oAhAN5dMugqYDQXERCAwF8hbqIojiAtOiMqViIRdiC3TiCW3iMRKZnRhZiEZZlB77Pz9mZXTiEwzmNS/mENpQ7VCW0O3Q+dNGjV8fr5T33YkwWk8t4Jr+pbhqaX8xMM98sNMvMerMpfyZrodEuo13TtGsxtmIPjuI2nsAyAzOxMIuyHDvyA34R7JrKJdoVG8rx9y54tb2u3jPvhclscpg82lXtz10zzGyzQLvWmY1Ju0D7yt5ACbsdb9ltADJJWkkpySUK2ASxNqtNZiOJrxPv2fHQJH6ScDphd8Lu64Out7oeujb62gR/pD/MH+oP8n/3v/PrAH56SeWH/dDlxSD+O+/IZzJU5v/LA/nX6PEr/N9cdP6e4ziBkziF0ziDbjiMa7iOG7iJW7iN7uiBO7iLe7iv7+6JXniIR3iMJ3iKZ+iNPkhAIixBMoS+6McwI4wyGZOjPw5xFAbgCAayMquwKquxOmtgEGuyFmuzDuuyHuuzAQZjCBuyERuzCZuyGZvrfw5jC7ZkK7ZmG7bFcIzg+/yAH/MTfsrPcBTHcBbPqauHXdmN7/I9fsiPOAYrORrrkQaa8FG4aSvBgJI2EBYjnSUiUwMHZJoslI9lUeCgLJYt8r1slV1yXHYHuskeOSLn5GjgsByT03JNzshZ6S7n5JLckctyRXqKLzflodwK9Jbb8lheyJNAH3kqryRBXssb6Ssx7jmG1cRAf7EA00sKyeDgkJoxMEoySSHJKYUdDFCLODiiFpWyUkrKORiolpcqUlmqOhikVpO6UlPqSX0Ag9UG0kwaSnNp4a54tpR27jHbSwcAw9WO8n7w2gfyYfD4I/lUPpbP5HMAR9UvpLN7zC4ORqpDHIxShzsYrU6VaQDGqEtkKYBx6pNAf4l1cFaNc/BcjRfr9oVySE6A76q5JDfAD9UqDiaoux1MVM87mKpedDAd8CAEOEitLXUADlC7Si+A3dVnov3sq76QGPffTGbJAmCOmkNyAZin5hEPwEI1v4MlajWpDmCp2tDBcvUXByvUGQ7HqDMdrFRny3wAq9QFDkerCx2sV5c52KCuEz2HjWqSTQA2A/kzOdj6B09lNjIAKgCdAIAAigB4ANQAZABOAFoAhwBgAFYANAI8ALwAxAAAABT+YAAUApsAIAMhAAsEOgAUBI0AEAWwABQGGAAVAaYAEQbAAA4AAAAAeAFdjgUOE0EUhmeoW0IUqc1UkZk0LsQqu8Wh3nm4W4wD4E7tLP9Gt9Eep4fAVvCR5+/LD6bOIzUwDucbcvn393hXdFKRmzc0uBLCfmyB39I4oMBPSI2IEn1E6v2RqZJYiMXZewvRF49u30O0HnivcX9BLQE2No89OzESbcr/Du8TndKI+phogFmQB3gSAAIflFpfNWLqvECkMTBDg1dWHm2L8lIKG7uBwc7KSyKN+G+Nnn/++HCoNqEQP6GRDAljg3YejBaLMKtKvFos8osq/c53/+YuZ/8X2n8XEKnbLn81CDqvqjLvF6qyKj2FZGmk1PmxsT2JkjTSCjVbI6NQ91xWOU3+SSzGZttmUXbXTbJPE7Nltcj+KeVR9eDik3uQ/a6Rh8gptD+5gl0xTp1Z+S2rR/YW6R+/xokBAAABAAIACAAC//8AD3gBjHoHeBPHFu45s0WSC15JlmWqLQtLdAOybEhPXqhphBvqvfSSZzqG0LvB2DTTYgyhpoFNAsumAgnYN/QW0et1ICHd6Y1ijd/MykZap3wvXzyjmS3zn39OnQUkGAogNJFUEEAGC8RAHIzXYhSr1dZejVFUCPBW1luL3sYGQIUOvVWSVn8XafBQH30AbADKQ300kQB7UpNCnSnUmfVuV1TMr1pMaCZW71Si7KoT82vrNi6X1SVYEa0ouNCPLqFJ8AFyIIN+T/dgzE0iUIokGJTUO69KpuBMMvmulUwJ9if980h/ILC56jecrksQA2l/AS6aDaI5OFmKat7bdan+r300lAkD0LoNugWfkJ7RNiFeTvHgv7fG/vdo5qh27UZl4kui486bLR98sO/99wOBPNFG3DKAyDiqC6qQppEoQRchTTUFVEFRzQH2NsFt90m8QUejsbgE6/BWmkLX4fd5vAECkwHEswxtfUiCghDaGAYwpgatwgYKG4TlUKoH9digHpejYQwHP0NtmJaogVAjkyoG1IZ8r3gbHWBia+bwxWhFrRPgrS2gmhU1Xr8rIaCCoibqM404fhfD7va77C725xP4n8/h1v/cApslQXqrW0G3H9DSgVJs2L2gO5q7L+9+4ssON+52W74RzR3oLVxHh+O6fBy8GDfTgfxvMd2YT4cTNw4GQBhT1Vq0yuuhOQwPSW9hYllqBE
5hgxQuI0mxcHotihoT4K3CW82O9wQiilY3PEpR1KQAbz281Zreu8KESvd4PR5/ekam3+dISHC40z3uFNkRnyCyQbxscrj97LIvPsHXNkPoPXft+Y/2b31x2973c7Mnz1qAbbY/e/y91XvO7l6Zm1OIk/8zy/fo6S2vnom/es1ZcXLp69PHDJ86ZPLGEcWn7Pv3W788tLhwFkiQVfWtlCMdhFioBx5Ih3YwJSSrwMQTamR1s4Gbycq1JyqgRqVpVrEaNp/TEsMjt6I2DLD9Zj+0ZuHphorW5t5I87t1jfSnaZmCm//KTGvdxp6e4Wub4GCCulM8fqcupd+f7mEMYHpGsn4lOfIC50byojNra86C17bOnVeyqHfXTr16ru5J7t+K8rattJLPdO7Zq0unPtSURQ5niUU5JdvzOs3funWx6elhg3t0eXr48O6Vp3OKty3ulFO8dbH8zLAhPbo+M3TIc788JmY/BgIMq6oQf5EOQCPwgg8W/IUeNGCDBjWKn8gGiVwpUhpwpdCaWRrwTkhpxjulWQrvrKFJe+iWuqEuwVqXE9FA0ZLwHk+uJKuuWoy8sJpwojK5mnC6uFqYMIMphcnp9sqMusZS20w0ca0R4p2ZGRkhooa98Nqgxw5sKzzQZ+xIfPzxrdMD5YO6Hn7+PKV4cdU0usG1dW3KpEmPtx36ZPeBuDBLfWHS8k6vf7BzQe8Xuz9DZ87bVLXt9oTHOnz6xDgsTpw+b9Iy4fOBy//VutdD/6fPWEB4XnRBUPc5SsjjSNUeh4HlPibomIsvSivocvwEEBbQZuRFeSRYwQJqnTRV1DffZst0ykQwKfYEp8njJQum/jjXs3KvBZf2eMGzYGoFeeZT3IzPdZw2jqbTz3rQWfRmycDxXXfgcwAIHvbOzFrvxHhCTN4Mm92fTog3M8FmI5kv/DTfu24v6b1hsHf+D5NJh0/o8/T1LuMn4U+YlnwGs7BRt/FdaAkdCggNyCChh6RCHUgO7bvIdlfU9z1QlwWSRNXCektaIlsqNVNi7jnVKdlNguDFrvRMK2xlWRuFTVvRk4dm7Hl7pnCx75px2Ju+Mqbo3/Sn/phMv/w3R/40rBTTxXchGuoBe5kKuvuQMWxfurtzuKxuK3N2Vh/ZiIV0xB46Agv3CLE7aTqe2InFgNCQlmM6XAUzOPmbNPFeEOEvBc6yV3ct8XJuVn/xnSG0vHPO4q0rhh3jOFJJEokl74LAOGQ7p2GkY2ILk1iaiF+RpDWAsJzFsUlwmnFdP8SMiTFj0p2hFH4qk0crBw9Xy9tn339/dvtBrR95pHWrhx4CBFtVjqDokdAODFpkKGRPOt3o27WJDNw4U24JQGACs8IoZoWxbL32oRWj2M1R7Oaws+I2GKVoVjR4pkgpFOJOIYJfsfna2uxe3S5MVt2dZIpR5RVfXxfLv/u2XNg9v2DZPJK/OH+BQEbTvfQA+tH3Bz6K7ehZeij224sXyumlihvnbgJCCQC5LL0Hcg0uiUGR/pxsgMQNQkzThLB1E4FPspzCbZX8qT5yeQ9dTGwNxdP52w4DIPQDEH1Maic8BcaAa3i3MyLSBDRBcfKVFEWzhOcVHps0h1MJrefyY41fYDGmse5GEF2ir7Ij3hrXY9GERWt3o3D5eAVLa6aRqwtI69mbemSv3LDk6K3zuy7Si7QPIPSvqhBuM3SemogRywDF1qCrywZ1OTqI1f0apGkfA/bTNgGO19L4rwGA2WqsQdNj9cwNFM0TJsnuAf58XUVtEGCtlhS5oT4mhhKSosYZ8kgpJjcORUkupNeNuYtzCqumFOwOfnTqm+kjpuRUAR1Oq/YUzspdtn7VYqEtyc1GyB//5udX/jtAa+FRZx/4ovzdCYuW5MzOI0DADyB2Y7oaBXWgizEChN0ClxUtIseKzAGGhWJZDvIsRzPL0XpCqd/EwTvcukmjD11Wk5B77NieYBZZcjA4Fw8m4Ndr6A7sPlr4qbI9OdYEENYxG2jJUDSEQSEMyJZFhiFMPrcAVDQxzJ4pFjkiU5pWLzwpmeqxSc62NcB3ID4M1sSjN/MTduZvBEapzRFPWDT2+hKq2XSnmEynupJvgm+1GJl3+JtfrpT9at1pXT5p7qpN86d2aEOukAvb6YSH6e3rN2jwwoczZ6svrdzlbwIE5jP8DaRdEA8u5vPCKlxbAr7/GCkBVEvgiFQUrUGkHjjcsmi6Bxf8fgVSBWbcjholEJ5JuVQF8RMO7/vst1OnaSX2wn+dGbA56eWpMwtWSLs2iLduzKe/nrtBf8ZHg51wJRZLwXHZPR9/+9r7LxbuBmQWCGIqY1+GtkY7D28Fxy4pkQYO1QaO6OYeVEwNvvZf0qeyQrgkdb7zvpRYBCDAOMZLHd3KXdC8Zm8d7IUO9vawsnH98locnAsvsyUv9ovcUqGel+tWnFffWUukmagORUuJJCtkJKEsKyKTEHimpfOFes7ZNoPRVjFhcPaCqsCZ4NzsQeMqykq/W/PSnTWrcuatpt+MXrigfMEiMX10Ses2H0z+8PqNDybta9O6ZNT7ly5Vbpm2rujWsgKx3sKJY/Pzy5cAEBhaVSXc0uVsDL0hXO7USGlnAzuXUrBzO+FpBAj6L7tBRQ1OXY2u5RF4BqRLxLXB6lBAcvuZl0hlLt5fk00LD923ZeCsvcPHnsi7dJuq9M3G3s9/p9/329B449RpqwvInA7PzbiRt/KbGfRD+nUG7UWnSuvFL+9kP9f13Zt7175YBlVVkMsi4GjxcfCA7XdAE4tnfwgTQInwhIk8kLE7m7Ko3IPd6WX3fCJMQBmUGAAlIsvW7wSEzvCRME3sCjIkROgYu8r8up5LoeRAPzrQTLIrTzG3NT94AKevxGkHOL9FWCBcET4GAUyQCsxgWOKgkxhp3ZpYK6rzlEK4UrlPeIz/Ca22BEs3AyDkwgHhmvhEGIsenDkWKaBKHIuOxC/UD44UelaWkEUo7KO5K+mCUiDwRNVvwiS214nggmf/InYls0Ey3+v6UthY6itchUUF/jZ+QSh+seCVmXkvfmWEPL+Jpbzh8ngYaftUznNjsobP2E0+e/fDsy+P7lJWXS2vm7zouYUDRmdNHvXvlw8f37WzZNSzRfSj6vIZCIyg98sXpDXgh8fg/4LaNpSbmBlis14BBbS4tmYOMS5Nk8xx/JdZ0dqTsL0F1LaKVj88wUrWZgG1WZrmDs/FKdojJFJvmd/y6sqbmWHjEjkFmeclNnCliMQk20Q+cuoJPrHbbCxoizaU9dwl086ZkI/FXHpnrz9jcddlK+1xU/dnPTunW7p91fglsp3uptpReuTt6Jjl6D3d950HUh86mXWHFr0VE1OOM364jUN33P25zrO9HxjbGFu1e+SFtfj7z/SrbT3+9dXJ11BY3fzh4IUvr7+NC7DoMM37/RZdVdbCPcHb9gZuxfpox/d+uE770uXLioYPsOAfDb/nLDYAkBpKKpggCjrWzp5rHxfIbCBzdbCIRPdfkVqrRemToZIffehmvXAyuDH/EGmxjbQ8GHwKf7iFM+h8dujSjdQjxSBAMYCYp2fuC
ZAEPQzxsnb2BHqEdKZpceElzXE8ieKRSAkrIRpdjc/qCmccshvZkCUjrlRXKE66ivHadz9MHDopn35FD+ODuS/RT2kppsxas6SA3pTUA6XDNzR37Z5z4DopDv66eBqa1s0aNWU0AMJkFhEuSQcYhx2MftKY67ITkrgAd4A2g3OsGzliSRNXLtGdDFZ/OtcacLo9TF0Iq6ZteuJ7qT698T2l9OgKjNr5FSY6y+puLXz/9CFt8/YGeOrLu5iNGUuOY/prNPj5jvX0x7tLv6NfrXgbiM7yIcZyNDig/T9wzJmLCaNirMbW4lG0OVnkFk2ClXltVtoTbzG+tA8bb8JN9PKBs8fK//j6gqRuo8eO9jtFj71OJNvdxRhf1eMW2gkA6kg66kiehrBG/Sk/ixZlvq3RBqcoKoZsTdHMBhdpdTmq/4TrwXzyv8ohwqpgSzKZbAlWbpDUjbRF9fppbH0LPPIPuq5ZiBhW74j1ZeOK7ur1TgQ3lAq5wfvIEJITnMnXqgMI05h2XGPakQSD/7+04+/qIa1RKLo2Sns7rlFSI9Lv7YcbPcM6rWEEmlRZ5A7H61eA7ZLTTVwpRKjWHB46xGtd6R+qRivWEPRhwk1MSCrNoOVlh/H6/lEv++lOouwfkbUV04/Pxi444usL6KI/0arJv9FPWrfHTutD3Elmfe96GPfOUOYZFMqwqyrwqoGTusmC2VqaBftFbKheXXFKfaz1SeayYEppKSkvY9s3QFKDy0g215/3WDNZr0Yb/sORsf4uH04uLZVU/pSfVUAn2M84aGXMZ8PBm+Nj4KRIA+CpvzWUfvlCxacQXXb39OWfS/PnTV6Fknr39umK8iMzlxQuhGp+JJ2ficbMM1x411Y041kyEJ6FPmLtCn1hBEyDRbAOSmAPmPtp7YGRJUuEX7dnyB3lnvJweZKcKxfKr8vvypZ+DKtJJw99iG5SX2PkLfwq+BEZ8QV5bTeNZxS2JoHgzMqz1VbQgCGVoMk/WQFE6hfXdB+OIFrl0rINzJ6qJZa76967j5FXw9YYlMAQo8Mn1Xw5BFE/4A91URCqvizEx+SyoxvtrMcteA2v3S610ZRV1G0vZXvwH/FVFk4yydC7w8Si4KbgUY4trK0WeFLDKG5Axk0JA6mtPQbz1IgEOiq944qFnGYMqai7rIx8sl8cfHcjA7JWfB4ITKqqkCzM6q2QBO2N9baRiFglslASaxVK8aTantNDGYTDq5+JmHSTtmVKluX0lvoG/X0VWYnRb+zE6OX7A3vfPS2c3b3nhECKL9CybcXY/lTWGXxsezHdf56ggA767e8j79IbGBeE6qhQqlfLdnhKi4rXS5YonsBBmILahZMWLeCfXbMQjm0cPaeIeSFW37uro6zXhVmlpO4PGEf/+IMWY591r75aQNeT+4IsLv169NznG1bkz1svAIHRVVGSzPhzQApDZXY3DuVtat1qVFYGxGrYP45KMFv5fVZDVGXZXrKRU5NkSpX/jtdkRivmTkUxh57s3O0etyrjtvTkvndOC6dxIuf2LP2454mpv9ru8VtCy84j+8/J+b1Dr1fzuw1APKpbhxMGaVKifrwi8S8k/2B0hgpbU0JplmJIs6J1y+Aak2AMR9WkyyZ0uLGGd7KflpThp7+jZVUO9jwVHIPeguItRfQKeSr4lqRev5B3rG2wMIZ8s3rGwuUIgNCNxa1sfl7EUIO3CVvL4O6NH45UmR+ZsFarE0boqaeHb4+hHKzHP6ew1ljj8hKQbcSfvqFw7a9xu+ke0vOPG2i/Vvjt3LJta5dtWoMjTw6hFV8WUuaMPnql6OVCkt/p46I3bkw8MXX+mplj+0wfPv3VsbvOTzgye/7aGRde4FK1ARDX6HluK6M4RvplxRDyA9XE8gi6hrbYT1uKwyXbne8l20ZAWMKYKmHvtMEDmmSPZzIb3aDhBMoQa7Q6BnORwWRKAS9z36FzEKtYgrTqmu8HepPs27HllTcltTLlFL2jECSfCtcrPRt37tgoXAVAnr+LQf28o50GJl7vGBM8g9MzujZAQfdpqXqy7iPs69qZ4M2S4Oenq8Rdd7qF/OiDAPJ3uox9DG7B6EANphnOB2oUOo4N4nQfL0RxbyqHuli9YwQ4M9HHGjvH4TVxMPhZg6aY/DLWbZL0aRndtJOeczrp0Z10cykeL31TuFVpVg8IN+90E1PHjr17leFDaA8gntLj70gjBWE8tZ2w8UgcUOTx1ZILhfA6vAsiC7nVU/nyWrlY3i2zKQFkjt0iQwi7HnD1/31kPvb7lKbjxZt0HS36DC9R3w1hHmkVbBVMIe2CR0g5OcM5jWNI9zKkZmhjRBrGY0AaBhdajwdCHxmGM67QqFIadY2cJ1crxwZvkCRhBX9/TwBxmh77Hoe/Tz4ifYoI3NHwcwcpPGmRTGwyFPv9/AzCge2FR+9eExpV/iD8sWHDcnHexqV8vZX0CImW54AJUoAhVk2182YhUttZ+ORZM4nev58uxKnSV7enFJne5+9pwr41tKv51kDSIm2JPci1o4lKBqqSeptnMRZ6BHP0VVP1uzFNJZH4VTQm7HZ+hsKSCQtOo7llZfKcW52L5Dy+7iPkshCv25DXYENhVQ9oaOLGwheRuFOornBL9r2BzWdjs+3iXtqIXAw2BQSxKksoAgAB6ke8pnZCJfHznKLKUcLqNWuAa694Ca9IFARwg4q8yMV+9z5foRI6WXo7jiQRwpM9vvyVTZR+wh7zgB43K4RvxKehETSBqZqzaTO9WFbU5Opo42QgnIm19d9QYROnnnlF845HePZ4ZK1ti3ZWx50kw7GeOzKH93h5vsx9uu/edwv94MdpjXc69NM9dzI/2muiRM19a/NJxK/fnjh+SO6eCQcn7T0nemh0r/XuFfSNicndc99ZXLy3x6AJQzs9u6b33ldpnRd7K0v7di4/3GswEN33JssAdaAuDNVs9epzbDZFFQLAvFI4s0w0er1a5xiSWdCTzRjeqTG1S3SnMX1gJz8mnmNnJNusXi6dycrdtZh8s/TkOEvJ7nG46Mbulfnvdevx9oLVxHqLnl0xU4bgR4vpBRqUPjxVQluUnAKE/7C9qmB71RC6aEqjJLZ0xNFbYu3cBiIzGiYfP2SLZ60RHqfWV4dBBKu/mnG3R98AxjZ5aMhq805p0sEx/6N3J15e/e5P5p3mgqylL63LmdK337ah6EVI2vh73pUdWQuPl7r3HuMaNYCh/FEGiIN6jOHE+g04RYkhhuU0w6moIZE3opeEGJ1hveMM2//2s589neW2TsavmysRCf0DgkwrF2JAxf59Y3eXWMYe+uC73UW56rP/eiOviHhuY9o8kn4HJuZh+i3T+4GN+NPaMxx7P4b9F8awg3GcpZl1jjl7LPcKw0usbQD1zMDvq5f29v56H9cj/WodhigRH7tCd5qNOZiUAv57J9quhITQSSCmyCaX3+MhT12jFdP/N/fsN0G3+NaiwXm+8Xn08rgiG2lkzotH188pW4IF9BsafGrzwW6P9T4tHHtlVZ2lLwHCAwDkmOxg0gzR4hK4FUZI0ShSwRMj
[base64-encoded WOFF data continued from the preceding @font-face data URI, omitted]) format('woff');
-}
-
-@font-face {
-  font-family: 'Roboto';
-  font-style: normal;
-  font-weight: bold;
-  src:
-    local('Roboto Medium'),
-    url(data:application/x-font-woff;charset=utf-8;base64,[base64-encoded WOFF data omitted]) format('woff'),
-}
-
-@font-face {
-  font-family: 'Roboto';
-  font-style: normal;
-  font-weight: 200;
-  src:
-    local('Roboto Light'),
-    url(data:application/x-font-woff;charset=utf-8;base64,[base64-encoded WOFF data omitted, continues beyond this hunk]
sVq/r0WtH0jx6bUNcGX4r8MyWk03LtOK6b3acPqiNrxCv8GQThWVaAfu06hctq1M20mvhV86jl8revgs437XHiTWNVeJnWEWvS/WOOeJVeYErNizRjqWzOGvxn5YGBnrW7uVtt0ielbDf1jhHn/+J/EP8QDEHj8g1FV6/FedDmPa0QcHmQwx4gGrvGWCidSG8yyZkAiH4WxemN3wWIAW0oXtIs5F8vTRxwT9Zj2lrUvN18dqO8Jf6SGlowtxbq3EPqkW4e19bWX3DovTx2emhPXx7TzZvV2Kc6eTjrrR6C1kvQnf7NiYMW7NksBLjKdVtC3NoVXaaO0L7bBWchudSAVK6WRtuaZpDdqTNGnHM09uELjhk8ZNmjVz8vgJwznhxSef2cEdod2pot2kHdQOaANphPbQ6rW5dD71Ux/E3PnatorNn1c9JU2ZVD2/cuGLE6ZJT1d9xmQ2k6zle/ObiASZIU65YqA2fs2kOfdoJ6j3HkfsgEv10JnaTG0WnWkcXHB/EWlx9xCoNSkDmf1qyCxEuuNM50VSqwWQgPPNeNdlJyahToD0lbah2sTu7I3ExvstL5BXCCQUDikhFxNLu/YA/FPBVwfbhkJKagux4S2YRSHIA1BsGXh7oTsV9D8HhNcJpwKDxUpYrgUREnxT6Y43GFxGjpfoo+fRRBq7naTMkOYakOYRXZqTIAPj6CQmzai2HKTLPVn1l759e5gtZVbhxqG7tg8aP+Le568kzehA/pY5M/relZY4rn/Xtn18Lt/NuV1uvUF7ju65+frb9L7xNGEXPSK+CRJor1tiLblEj0flMfByen6fTMN+ftqHT/Jn4PtWSWvAa5VoA+hKuKoTpz5MDP7H1SvOWIBnd6uY6motumgsLpU37s5m96dIRL8P2CTrFVU9ySoKG/OWJcNmDh6bekfcoNFVT2qrenYv7mCe29syaPDwiUw/F4B+DojpZxE6Kh/Dk/BrAfVqJ+6hOdqRTxqP1tKFdJG2yKMtajzQ50vZHKspnc2xui47ySoX6Gltq5OsvAf4c9E4axEyrPlMKyU68/SZmaGwLq56xclF+UqTi+6LJhcpbqjZ+GL0XX0vxhCj5DOkiLw8BC8FsBeBmEkWiYgYaSQG7ywFiljHCj7YDjaLLKE31MFGAecdwqveUWlc7sxPxoAcr88tmTqzulIG6dnq5FKgtcpSm9g90YKN3RN9heElRuelJ5joZNzgFeeYuC90dgjGvpONe7+DpKyVnWNJLCOspkL8CoRikMogIwVcS7oewdIZwKoN6n8Fm0hEXJWRjiTKCbYrkxiLepemcjbGwysSyeezgMnpsyMgbxmQRffWpkf8rU2PJBhZe8Tp9hUXtz5BwqTRcozkLRTARcMkYodG/eON/YA/gMwukZRcvCMcZ4kPqx5gOD4dIqn59tCX+3QW+9ica22i/ldi09YRo8djrcwpXWLjMR632PtnyNaLtz4/hjtYv1v8GvQbrI/8j37Xl+IP6zO6mdb6iKux490uzRXreHdi2w/A9gMXd7wDLtxtREjKwY435nq+kBq6oOOdkC8oSXtF1Y8db1+zjrfPVRPv8+uPpEhMSvBgB8vfrEoA51jH2xefmKR3vP0J8YmNHe+A0fFOtgFscaVltu+AsEXxymp+AWt+411C3mSj+W33tNL8zr5s55uFkWbtb6m+ttX29x9MaZp64NP3tNYA52+OKRGv9ytBFtivzCQjrtSxzGqtY5ltdCy3Y8cyI/i/7VkyIi/XuDzHqLtk95K+0sw3PwuBVhPfbumb6X/lm5/VfbOwm13uXB/sT5HYcxoSxKMX+uYWVf/L+2bjeRVXKPwzb9B69Z+2ZX75cj0AbkPMJ+v7PdDok8c223EqeohAGO9tUjJCzQj4v/HKlyYu5jFap68L88iXJe+s7kbw/jespYKMPSQB51YvUU1NvEQ1NSnml2WvHwzyv6qoMslcWFa9k6nlRcVV/iddDryxT5x594MkFly4Ux+KIhEyUDuO6TRtPCW28RovT/A24cYEr4mKmuQ4C7yVoL+VUFCbrOd92GdKwCKXLOm3J1yRtJhcLqBuIvPlFxEn9GZSiMX9UUzHAiSHXN8qYmnbmlW0M6xiByKWNsFsfYRYzcy64uQ18xTBInilwUtH91/qFvG/l/1KzU9w2uEpVw7zNiqCvCQq6E7EsB/JcjFtLSz+8rShxbdC26XtozltrdvISy3puqyxfN6Sphhm6A+YwU9ScSb/YhST1hqKSTesZTugmITEFKQnTlaTki8HaAwqWuKa61vs/mKUMLL5jpntCFbxNMHKYjr2dC5h5RmXsPKAse9asPKkNGPbDtz25c2huRguMIlvW1JwsW2ktGA6Jc8Lx7l3xTqIRHns2Scie76YLOjBCJJH0UvMYLTWWKlfv3eosCgMiXCO6fnvSr4vr94gHPcd/dbNxiTA920SltKz4iesDnAjwYK3XgxWfAW1vJFGJsQy/CQ9wzfSd3wmDoZudxz4BwuPrPBByg6JZVO11dfsKUh6dN5017V9S0b3u65kYGF2VjiclV0otu83Gk6MGHFdTudw27aFXZDWMuEUdx5ipAd3BdhMEtmwBi/G+vO1Hj2t9TAx1Vr1cgJrbeHUGc9G59i8EClWeZeRM+q7aioAI2gqmzD46vWF+X1umnTLDSu7FPQW6e33Tbq+yDtk2qRru1y+jvK/f+9FbqvwHST7PPCddRv4en2ItmnqFb7yotCL21qG87FLuK3i3it+fonY1fj8cCFEZfZco8Zn1MSeakTY4Dt7Ro2o3x7Dvu0J877hk6+7SghtpV21t7fq+7zMdS7zrJvhV1VMhi923FGjvW9c53wHKlH+v76Onz3+bnjnijGfUut7+zS8LwP2wpmNZ+z1YRZw0RP2dNoU0cUqKDbjLiCDTEWS2egGu+k0RnK4kfB5zYg3WKCvab/8msYt7bHH+RlrGqRgeUUqVqzslqiWz/ZDJm1vxiiDXTgT0oX+Qd3/V2vqrDTWDFeO2di5cswhmrN9m/YpfAde0Z/jPS93s+cJYSWmn1EREczhMD4KQBUtoVCzpwvFxZ4uZJSJ8UkHism4w87beBegAQXwZ9dSKi8l55euZ//pOjGBrKUNrIYUIFQxxVyYTZ8XN8cEJ+jCYrXPCReVPOE6pXCd31teR+FCxqWarkPxOkapqrSVyhTb002Asd4TD4KHhXwyBwnOMB6dptjCqszjhGItoTlWO8Na2PpIxmcpshP4GEUeM8YaR44VeyHtC5TcOpWTsP4JMvImABdTc7F+lIodjvhQJJc9zSWXWLAThLVRlGOHZg9pseNDWuzGQ1p+nfzGNL197WAPabFjr3rn6bq951j6aXPVxEFamKe4XDVOlwPST/izWfoJ5zD9hICGqactzulq1o/OYNVWfbQyiOOV5ILxSvavecbVk9700ksvUedXxZN7W7pM6br5bS4YPYo/724qLu9s6XJf96+0U5yvbGNZ1mkadDnHuTw/vpUDf3rePCHLY50u2uZ3jx6HRvHPCNew+3X8pFKvjELOh0+w1MMR3/iAL3zWjtn
pgfScRSapzng+W+t38qArAA2o9evRy+/C2bpaZ1P0ciG6tdoNPBVgD+iB7M0D/+Aohw/yJnkUnbfiBtpx5CZp65C/SM+HX5TE8f36ae3pP7T2XKI2lFZHf6BzqTaPPka1qUyPEPh1Zc/UIJ3kgIzH597+f+LPPhMAAHjaY2BkYGAAYqY1CuLx/DZfGeQ5GEDgHDPraRj9v/efIdsr9gQgl4OBCSQKAP2qCgwAAAB42mNgZGDgSPq7Fkgy/O/9f4rtFQNQBAUsBACcywcFAHjaNZJNSFRRGIafc853Z2rTohZu+lGiAknINv1trKZFP0ZWmxorNf8ycVqMkDpQlJQLIxCCEjWzRCmScBEExmyCpEXRrqBlizLJKGpr771Ni4f3fOec7573e7l+kcwKwP0s8ZYxf4Qr9of9luNytECXLZJ19eT9VQb9IKtDC+usn8NugBP+ENXuK1OhivX2mJvqmRM50S4OiBlxV9SKZnHKzTLsntNhZdrr445tohAmqEsfpdeWKbffFKMK+qMaijYiRlX3MBRNU/SVfLQ2jkdrtb+DYmpJZzOiiYL9kp6nEGXk4Z3eeklVdJYpW6I8Xcku+8Ie+0SFzXPOfeNh2MI2KeEktSGP8wc5Y7W0WZ5ReWqU5mwD9f4B+6xb6zxj7j1P3eflW+E79+N1ukyzaV9kkz71+Beq19Dlp9msejgssDW1ir3S7WKjOO0fkXGvmJWujHq5HWdvWc0/pNxfUxWKTKRauBgm6YszTnXQ6mvI615TGOdaktNIksebePYEzZrMG88g326eeyVfMcMxSU6qk3uxt0uMy8OTUKA1PIN0g/Ioqe/W//BB7P4Hi9IeabvO5Ok/0Q0mU9cZcJ36T2IayfpmcUHU6a0K5uI+30inaIm/adUcsx802E74C0holcIAAAB42mNgYNCBwjCGPsYCxj9MM5iNmMOYW5g3sXCx+LAUsPSxrGM5xirE6sC6hM2ErYFdjL2NfR+HA8cWjjucPJwqnG6ccZzHuPq4DnHrcE/ivsTDx+PCs4PnAy8fbxDvBN5tfGx8TnxT+G7w2/AvEZAT8BPoEtgkaCWYIzhH8JTgNyEeIRuhOKEKoRnCQcLbRKRE6kTuieqJrhH9IiYnFie2QGyXuJZ4kfgBCQWJFok9knaSfZLXJP9JTZM6Ic0ibSTdIb1E+peMDxDuk3WQXSJ7Ra5OboHcOvks+Qny5+Q/KegplCjMU/ilmKO4RUlA6Zqyk3KO8hEVE5UOlW+qKarn1NTUOtQ2qf1Td8EBg9QT1PPU29TnqR9Sf6bBoeGkUaOxTeODxgdNEU0rIPymFaeVBQDd1FqqAAAAAQAAAKEARAAFAAAAAAACAAEAAgAWAAABAAFRAAAAAHjadVLLSsNQED1Jq9IaRYuULoMLV22aVhGJIBVfWIoLLRbETfqyxT4kjYh7P8OvcVV/QvwUT26mNSlKuJMzcydnzswEQAZfSEBLpgAc8YRYg0EvxDrSqApOwEZdcBI5vAleQh7vgpcZnwpeQQXfglMwNFPwKra0vGADO1pF8Bruta7gddS1D8EbMPSs4E2k9W3BGeT0Gc8UWf1U8Cds/Q7nGGMEHybacPl2iVqMPeEVHvp4QE/dXjA2pjdAh16ZPZZorxlr8vg8tXn2LNdhZjTDjOQ4wmLj4N+cW9byMKEfaDRZ0eKxVe092sO5kt0YRyHCEefuk81UPfpkdtlzB0O+PTwyNkZ3oVMr5sVvgikNccIqnuL1aV2lM6wZaPcZD7QHelqMjOh3WNXEM3Fb5QRaemqqx5y6y7zQi3+TZ2RxHmWqsFWXPr90UOTzoh6LPL9cFvM96i5SeZRzwkgNl+zhDFe4oS0I5997/W9PDXI1ObvZn1RSHA3ptMpeBypq0wb7drivfdoy8XyDP0JQfA542m3Ou0+TcRTG8e+hpTcol9JSoCqKIiqI71taCqJCtS3ekIsWARVoUmxrgDaFd2hiTEx0AXVkZ1Q3Edlw0cHEwcEBBv1XlNLfAAnP8slzknNyKGM//56R5Kisg5SJCRNmyrFgxYYdBxVU4qSKamqoxUUdbjzU46WBRprwcYzjnKCZk5yihdOcoZWztHGO81ygnQ4u0sklNHT8dBEgSDcheujlMn1c4SrX6GeAMNe5QYQoMQa5yS1uc4e7DHGPYUYYZYz7PCDOOA+ZYJIpHvGYJ0wzwywJMfOK16zxjlXeSzkrvOUvH/jBHD/5RYrfpMmQY5kCz3nBS7GIVWxiZ4c/7IpDKqRSnFIl1VIjteKSOnGLR+rFyyc2+MIW3/jMJt/5KA1s81UapYk34rOk5gu5tG41FjOapkVKhjVlxDmcNhZTibyxMJ8wlp3ZQy1+qBkHW3Hfv3dQqSv9yi5lQBlUditDyh5lrzJcUld3dd3xNJMy8nPJxFK6NPLHSgZj5qiRzxZLdO+P/+/adfZ42j3OKRLCQBAF0Bkm+0JWE0Ex6LkCksTEUKikiuIGWCwYcHABOEQHReE5BYcJHWjG9fst/n/w/gj8zGpwlk3H+aXtKks1M4jbGvIVHod2ApZaNwyELEGoBRiyvItipL4wEcaUYMnyyUy+ZWQbn9ab4CDsF8FFODeCh3CvBB/hnQgBwq8IISL4V40RofyBQ0TTUkwj7OhEtUMmyHSjGSOTuWY2rI32PdNJPiQZL3TSQq4+STRSagAAAAFR3VVMAAA=) format('woff'); -} \ No newline at end of file diff --git a/plugins/UiConfig/media/img/loading.gif b/plugins/UiConfig/media/img/loading.gif deleted file mode 100644 index 27d0aa81..00000000 Binary files a/plugins/UiConfig/media/img/loading.gif and /dev/null differ diff --git a/plugins/UiConfig/media/js/ConfigStorage.coffee b/plugins/UiConfig/media/js/ConfigStorage.coffee deleted file mode 100644 index b1c6e4fd..00000000 --- a/plugins/UiConfig/media/js/ConfigStorage.coffee +++ /dev/null @@ -1,152 +0,0 @@ -class ConfigStorage extends Class - constructor: (@config) -> - @items = [] - @createSections() - @setValues(@config) - - setValues: (values) -> - for section in @items - for item in section.items - if not values[item.key] - continue - item.value = @formatValue(values[item.key].value) - item.default = @formatValue(values[item.key].default) - 
item.pending = values[item.key].pending - values[item.key].item = item - - formatValue: (value) -> - if not value - return false - else if typeof(value) == "object" - return value.join("\n") - else if typeof(value) == "number" - return value.toString() - else - return value - - deformatValue: (value, type) -> - if type == "object" and typeof(value) == "string" - if not value.length - return value = null - else - return value.split("\n") - if type == "boolean" and not value - return false - else - return value - - createSections: -> - # Web Interface - section = @createSection("Web Interface") - - section.items.push - key: "open_browser" - title: "Open web browser on ZeroNet startup" - type: "checkbox" - - # Network - section = @createSection("Network") - - section.items.push - key: "fileserver_ip_type" - title: "File server network" - type: "select" - options: [ - {title: "IPv4", value: "ipv4"} - {title: "IPv6", value: "ipv6"} - {title: "Dual (IPv4 & IPv6)", value: "dual"} - ] - description: "Accept incoming peers using IPv4 or IPv6 address. (default: dual)" - - section.items.push - key: "fileserver_port" - title: "File server port" - type: "text" - valid_pattern: /[0-9]*/ - description: "Other peers will use this port to reach your served sites. (default: 15441)" - - section.items.push - key: "ip_external" - title: "File server external ip" - type: "textarea" - placeholder: "Detect automatically" - description: "Your file server is accessible on these ips. (default: detect automatically)" - - section.items.push - title: "Tor" - key: "tor" - type: "select" - options: [ - {title: "Disable", value: "disable"} - {title: "Enable", value: "enable"} - {title: "Always", value: "always"} - ] - description: [ - "Disable: Don't connect to peers on Tor network", h("br"), - "Enable: Only use Tor for Tor network peers", h("br"), - "Always: Use Tor for every connections to hide your IP address (slower)" - ] - - section.items.push - title: "Use Tor bridges" - key: "tor_use_bridges" - type: "checkbox" - description: "Use obfuscated bridge relays to avoid network level Tor block (even slower)" - isHidden: -> - return not Page.server_info.tor_has_meek_bridges - - section.items.push - title: "Trackers" - key: "trackers" - type: "textarea" - description: "Discover new peers using these adresses" - - section.items.push - title: "Trackers files" - key: "trackers_file" - type: "text" - description: "Load additional list of torrent trackers dynamically, from a file" - placeholder: "Eg.: data/trackers.json" - value_pos: "fullwidth" - - section.items.push - title: "Proxy for tracker connections" - key: "trackers_proxy" - type: "select" - options: [ - {title: "Custom", value: ""} - {title: "Tor", value: "tor"} - {title: "Disable", value: "disable"} - ] - - section.items.push - title: "Custom socks proxy address for trackers" - key: "trackers_proxy" - type: "text" - placeholder: "Eg.: 127.0.0.1:1080" - value_pos: "fullwidth" - valid_pattern: /.+:[0-9]+/ - isHidden: => - Page.values["trackers_proxy"] in ["tor", "disable"] - - # Performance - section = @createSection("Performance") - - section.items.push - key: "log_level" - title: "Level of logging to file" - type: "select" - options: [ - {title: "Everything", value: "DEBUG"} - {title: "Only important messages", value: "INFO"} - {title: "Only errors", value: "ERROR"} - ] - - createSection: (title) => - section = {} - section.title = title - section.items = [] - @items.push(section) - return section - -window.ConfigStorage = ConfigStorage \ No newline at end of file 
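
For reviewers: the `ConfigStorage` class removed above keeps every setting as a flat list of items and converts values between the server's representation and the editable text shown in the UI — `formatValue()` joins arrays into newline-separated text, and `deformatValue()` reverses that using the type of the setting's default, which is how `saveValues()` in UiConfig.coffee calls it. A minimal sketch of that round-trip for a list-type setting such as `trackers` (the tracker URLs are placeholders, and the snippet assumes the page environment UiConfig.coffee normally sets up):

```
# Assumes window.h = maquette.h and the ConfigStorage class above are already loaded,
# as UiConfig.coffee does on startup.
storage = new ConfigStorage({})

# A list-type value (e.g. "trackers") is shown in the textarea as newline-separated text...
trackers = ["udp://tracker.example.org:1337", "zero://tracker.example.bit:15441"]  # placeholder URLs
as_text = storage.formatValue(trackers)             # "udp://...\nzero://..."

# ...and converted back to a list before being sent to configSet, using the type
# of the setting's default value, exactly as saveValues() does.
restored = storage.deformatValue(as_text, typeof(trackers))  # ["udp://...", "zero://..."]
```
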
diff --git a/plugins/UiConfig/media/js/ConfigView.coffee b/plugins/UiConfig/media/js/ConfigView.coffee deleted file mode 100644 index a110a17d..00000000 --- a/plugins/UiConfig/media/js/ConfigView.coffee +++ /dev/null @@ -1,124 +0,0 @@ -class ConfigView extends Class - constructor: () -> - @ - - render: -> - @config_storage.items.map @renderSection - - renderSection: (section) => - h("div.section", {key: section.title}, [ - h("h2", section.title), - h("div.config-items", section.items.map @renderSectionItem) - ]) - - handleResetClick: (e) => - node = e.currentTarget - config_key = node.attributes.config_key.value - default_value = node.attributes.default_value?.value - Page.cmd "wrapperConfirm", ["Reset #{config_key} value?", "Reset to default"], (res) => - if (res) - @values[config_key] = default_value - Page.projector.scheduleRender() - - renderSectionItem: (item) => - value_pos = item.value_pos - - if item.type == "textarea" - value_pos ?= "fullwidth" - else - value_pos ?= "right" - - value_changed = @config_storage.formatValue(@values[item.key]) != item.value - value_default = @config_storage.formatValue(@values[item.key]) == item.default - - if item.key in ["open_browser", "fileserver_port"] # Value default for some settings makes no sense - value_default = true - - marker_title = "Changed from default value: #{item.default} -> #{@values[item.key]}" - if item.pending - marker_title += " (change pending until client restart)" - - if item.isHidden?() - return null - - h("div.config-item", {key: item.title, enterAnimation: Animation.slideDown, exitAnimation: Animation.slideUpInout}, [ - h("div.title", [ - h("h3", item.title), - h("div.description", item.description) - ]) - h("div.value.value-#{value_pos}", - if item.type == "select" - @renderValueSelect(item) - else if item.type == "checkbox" - @renderValueCheckbox(item) - else if item.type == "textarea" - @renderValueTextarea(item) - else - @renderValueText(item) - h("a.marker", { - href: "#Reset", title: marker_title, - onclick: @handleResetClick, config_key: item.key, default_value: item.default, - classes: {default: value_default, changed: value_changed, visible: not value_default or value_changed or item.pending, pending: item.pending} - }, "\u2022") - ) - ]) - - # Values - handleInputChange: (e) => - node = e.target - config_key = node.attributes.config_key.value - @values[config_key] = node.value - Page.projector.scheduleRender() - - handleCheckboxChange: (e) => - node = e.currentTarget - config_key = node.attributes.config_key.value - value = not node.classList.contains("checked") - @values[config_key] = value - Page.projector.scheduleRender() - - renderValueText: (item) => - value = @values[item.key] - if not value - value = "" - h("input.input-#{item.type}", {type: item.type, config_key: item.key, value: value, placeholder: item.placeholder, oninput: @handleInputChange}) - - autosizeTextarea: (e) => - if e.currentTarget - # @handleInputChange(e) - node = e.currentTarget - else - node = e - height_before = node.style.height - if height_before - node.style.height = "0px" - h = node.offsetHeight - scrollh = node.scrollHeight + 20 - if scrollh > h - node.style.height = scrollh + "px" - else - node.style.height = height_before - - renderValueTextarea: (item) => - value = @values[item.key] - if not value - value = "" - h("textarea.input-#{item.type}.input-text",{ - type: item.type, config_key: item.key, oninput: @handleInputChange, afterCreate: @autosizeTextarea, - updateAnimation: @autosizeTextarea, value: value, placeholder: 
item.placeholder - }) - - renderValueCheckbox: (item) => - if @values[item.key] and @values[item.key] != "False" - checked = true - else - checked = false - h("div.checkbox", {onclick: @handleCheckboxChange, config_key: item.key, classes: {checked: checked}}, h("div.checkbox-skin")) - - renderValueSelect: (item) => - h("select.input-select", {config_key: item.key, oninput: @handleInputChange}, - item.options.map (option) => - h("option", {selected: option.value == @values[item.key], value: option.value}, option.title) - ) - -window.ConfigView = ConfigView \ No newline at end of file diff --git a/plugins/UiConfig/media/js/UiConfig.coffee b/plugins/UiConfig/media/js/UiConfig.coffee deleted file mode 100644 index 4ee3a1c6..00000000 --- a/plugins/UiConfig/media/js/UiConfig.coffee +++ /dev/null @@ -1,127 +0,0 @@ -window.h = maquette.h - -class UiConfig extends ZeroFrame - init: -> - @save_visible = true - @config = null # Setting currently set on the server - @values = null # Entered values on the page - @config_view = new ConfigView() - window.onbeforeunload = => - if @getValuesChanged().length > 0 - return true - else - return null - - onOpenWebsocket: => - @cmd("wrapperSetTitle", "Config - ZeroNet") - @cmd "serverInfo", {}, (server_info) => - @server_info = server_info - @restart_loading = false - @updateConfig() - - updateConfig: (cb) => - @cmd "configList", [], (res) => - @config = res - @values = {} - @config_storage = new ConfigStorage(@config) - @config_view.values = @values - @config_view.config_storage = @config_storage - for key, item of res - value = item.value - @values[key] = @config_storage.formatValue(value) - @projector.scheduleRender() - cb?() - - createProjector: => - @projector = maquette.createProjector() - @projector.replace($("#content"), @render) - @projector.replace($("#bottom-save"), @renderBottomSave) - @projector.replace($("#bottom-restart"), @renderBottomRestart) - - getValuesChanged: => - values_changed = [] - for key, value of @values - if @config_storage.formatValue(value) != @config_storage.formatValue(@config[key]?.value) - values_changed.push({key: key, value: value}) - return values_changed - - getValuesPending: => - values_pending = [] - for key, item of @config - if item.pending - values_pending.push(key) - return values_pending - - saveValues: (cb) => - changed_values = @getValuesChanged() - for item, i in changed_values - last = i == changed_values.length - 1 - value = @config_storage.deformatValue(item.value, typeof(@config[item.key].default)) - value_same_as_default = JSON.stringify(@config[item.key].default) == JSON.stringify(value) - if value_same_as_default - value = null - - if @config[item.key].item.valid_pattern and not @config[item.key].item.isHidden?() - match = value.match(@config[item.key].item.valid_pattern) - if not match or match[0] != value - message = "Invalid value of #{@config[item.key].item.title}: #{value} (does not matches #{@config[item.key].item.valid_pattern})" - Page.cmd("wrapperNotification", ["error", message]) - cb(false) - break - - @saveValue(item.key, value, if last then cb else null) - - saveValue: (key, value, cb) => - if key == "open_browser" - if value - value = "default_browser" - else - value = "False" - - Page.cmd "configSet", [key, value], (res) => - if res != "ok" - Page.cmd "wrapperNotification", ["error", res.error] - cb?(true) - - render: => - if not @config - return h("div.content") - - h("div.content", [ - @config_view.render() - ]) - - handleSaveClick: => - @save_loading = true - @logStart "Save" - 
@saveValues (success) => - @save_loading = false - @logEnd "Save" - if success - @updateConfig() - Page.projector.scheduleRender() - return false - - renderBottomSave: => - values_changed = @getValuesChanged() - h("div.bottom.bottom-save", {classes: {visible: values_changed.length}}, h("div.bottom-content", [ - h("div.title", "#{values_changed.length} configuration item value changed"), - h("a.button.button-submit.button-save", {href: "#Save", classes: {loading: @save_loading}, onclick: @handleSaveClick}, "Save settings") - ])) - - handleRestartClick: => - @restart_loading = true - Page.cmd("serverShutdown", {restart: true}) - Page.projector.scheduleRender() - return false - - renderBottomRestart: => - values_pending = @getValuesPending() - values_changed = @getValuesChanged() - h("div.bottom.bottom-restart", {classes: {visible: values_pending.length and not values_changed.length}}, h("div.bottom-content", [ - h("div.title", "Some changed settings requires restart"), - h("a.button.button-submit.button-restart", {href: "#Restart", classes: {loading: @restart_loading}, onclick: @handleRestartClick}, "Restart ZeroNet client") - ])) - -window.Page = new UiConfig() -window.Page.createProjector() diff --git a/plugins/UiConfig/media/js/all.js b/plugins/UiConfig/media/js/all.js deleted file mode 100644 index 0d0f2c3d..00000000 --- a/plugins/UiConfig/media/js/all.js +++ /dev/null @@ -1,1941 +0,0 @@ - - -/* ---- plugins/UiConfig/media/js/lib/Class.coffee ---- */ - - -(function() { - var Class, - slice = [].slice; - - Class = (function() { - function Class() {} - - Class.prototype.trace = true; - - Class.prototype.log = function() { - var args; - args = 1 <= arguments.length ? slice.call(arguments, 0) : []; - if (!this.trace) { - return; - } - if (typeof console === 'undefined') { - return; - } - args.unshift("[" + this.constructor.name + "]"); - console.log.apply(console, args); - return this; - }; - - Class.prototype.logStart = function() { - var args, name; - name = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; - if (!this.trace) { - return; - } - this.logtimers || (this.logtimers = {}); - this.logtimers[name] = +(new Date); - if (args.length > 0) { - this.log.apply(this, ["" + name].concat(slice.call(args), ["(started)"])); - } - return this; - }; - - Class.prototype.logEnd = function() { - var args, ms, name; - name = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; - ms = +(new Date) - this.logtimers[name]; - this.log.apply(this, ["" + name].concat(slice.call(args), ["(Done in " + ms + "ms)"])); - return this; - }; - - return Class; - - })(); - - window.Class = Class; - -}).call(this); - - -/* ---- plugins/UiConfig/media/js/lib/Promise.coffee ---- */ - - -(function() { - var Promise, - slice = [].slice; - - Promise = (function() { - Promise.when = function() { - var args, fn, i, len, num_uncompleted, promise, task, task_id, tasks; - tasks = 1 <= arguments.length ? 
slice.call(arguments, 0) : []; - num_uncompleted = tasks.length; - args = new Array(num_uncompleted); - promise = new Promise(); - fn = function(task_id) { - return task.then(function() { - args[task_id] = Array.prototype.slice.call(arguments); - num_uncompleted--; - if (num_uncompleted === 0) { - return promise.complete.apply(promise, args); - } - }); - }; - for (task_id = i = 0, len = tasks.length; i < len; task_id = ++i) { - task = tasks[task_id]; - fn(task_id); - } - return promise; - }; - - function Promise() { - this.resolved = false; - this.end_promise = null; - this.result = null; - this.callbacks = []; - } - - Promise.prototype.resolve = function() { - var back, callback, i, len, ref; - if (this.resolved) { - return false; - } - this.resolved = true; - this.data = arguments; - if (!arguments.length) { - this.data = [true]; - } - this.result = this.data[0]; - ref = this.callbacks; - for (i = 0, len = ref.length; i < len; i++) { - callback = ref[i]; - back = callback.apply(callback, this.data); - } - if (this.end_promise) { - return this.end_promise.resolve(back); - } - }; - - Promise.prototype.fail = function() { - return this.resolve(false); - }; - - Promise.prototype.then = function(callback) { - if (this.resolved === true) { - callback.apply(callback, this.data); - return; - } - this.callbacks.push(callback); - return this.end_promise = new Promise(); - }; - - return Promise; - - })(); - - window.Promise = Promise; - - - /* - s = Date.now() - log = (text) -> - console.log Date.now()-s, Array.prototype.slice.call(arguments).join(", ") - - log "Started" - - cmd = (query) -> - p = new Promise() - setTimeout ( -> - p.resolve query+" Result" - ), 100 - return p - - back = cmd("SELECT * FROM message").then (res) -> - log res - return "Return from query" - .then (res) -> - log "Back then", res - - log "Query started", back - */ - -}).call(this); - - -/* ---- plugins/UiConfig/media/js/lib/Prototypes.coffee ---- */ - - -(function() { - String.prototype.startsWith = function(s) { - return this.slice(0, s.length) === s; - }; - - String.prototype.endsWith = function(s) { - return s === '' || this.slice(-s.length) === s; - }; - - String.prototype.repeat = function(count) { - return new Array(count + 1).join(this); - }; - - window.isEmpty = function(obj) { - var key; - for (key in obj) { - return false; - } - return true; - }; - -}).call(this); - - -/* ---- plugins/UiConfig/media/js/lib/maquette.js ---- */ - - -(function (root, factory) { - if (typeof define === 'function' && define.amd) { - // AMD. Register as an anonymous module. 
- define(['exports'], factory); - } else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') { - // CommonJS - factory(exports); - } else { - // Browser globals - factory(root.maquette = {}); - } -}(this, function (exports) { - 'use strict'; - ; - ; - ; - ; - var NAMESPACE_W3 = 'http://www.w3.org/'; - var NAMESPACE_SVG = NAMESPACE_W3 + '2000/svg'; - var NAMESPACE_XLINK = NAMESPACE_W3 + '1999/xlink'; - // Utilities - var emptyArray = []; - var extend = function (base, overrides) { - var result = {}; - Object.keys(base).forEach(function (key) { - result[key] = base[key]; - }); - if (overrides) { - Object.keys(overrides).forEach(function (key) { - result[key] = overrides[key]; - }); - } - return result; - }; - // Hyperscript helper functions - var same = function (vnode1, vnode2) { - if (vnode1.vnodeSelector !== vnode2.vnodeSelector) { - return false; - } - if (vnode1.properties && vnode2.properties) { - if (vnode1.properties.key !== vnode2.properties.key) { - return false; - } - return vnode1.properties.bind === vnode2.properties.bind; - } - return !vnode1.properties && !vnode2.properties; - }; - var toTextVNode = function (data) { - return { - vnodeSelector: '', - properties: undefined, - children: undefined, - text: data.toString(), - domNode: null - }; - }; - var appendChildren = function (parentSelector, insertions, main) { - for (var i = 0; i < insertions.length; i++) { - var item = insertions[i]; - if (Array.isArray(item)) { - appendChildren(parentSelector, item, main); - } else { - if (item !== null && item !== undefined) { - if (!item.hasOwnProperty('vnodeSelector')) { - item = toTextVNode(item); - } - main.push(item); - } - } - } - }; - // Render helper functions - var missingTransition = function () { - throw new Error('Provide a transitions object to the projectionOptions to do animations'); - }; - var DEFAULT_PROJECTION_OPTIONS = { - namespace: undefined, - eventHandlerInterceptor: undefined, - styleApplyer: function (domNode, styleName, value) { - // Provides a hook to add vendor prefixes for browsers that still need it. 
- domNode.style[styleName] = value; - }, - transitions: { - enter: missingTransition, - exit: missingTransition - } - }; - var applyDefaultProjectionOptions = function (projectorOptions) { - return extend(DEFAULT_PROJECTION_OPTIONS, projectorOptions); - }; - var checkStyleValue = function (styleValue) { - if (typeof styleValue !== 'string') { - throw new Error('Style values must be strings'); - } - }; - var setProperties = function (domNode, properties, projectionOptions) { - if (!properties) { - return; - } - var eventHandlerInterceptor = projectionOptions.eventHandlerInterceptor; - var propNames = Object.keys(properties); - var propCount = propNames.length; - for (var i = 0; i < propCount; i++) { - var propName = propNames[i]; - /* tslint:disable:no-var-keyword: edge case */ - var propValue = properties[propName]; - /* tslint:enable:no-var-keyword */ - if (propName === 'className') { - throw new Error('Property "className" is not supported, use "class".'); - } else if (propName === 'class') { - if (domNode.className) { - // May happen if classes is specified before class - domNode.className += ' ' + propValue; - } else { - domNode.className = propValue; - } - } else if (propName === 'classes') { - // object with string keys and boolean values - var classNames = Object.keys(propValue); - var classNameCount = classNames.length; - for (var j = 0; j < classNameCount; j++) { - var className = classNames[j]; - if (propValue[className]) { - domNode.classList.add(className); - } - } - } else if (propName === 'styles') { - // object with string keys and string (!) values - var styleNames = Object.keys(propValue); - var styleCount = styleNames.length; - for (var j = 0; j < styleCount; j++) { - var styleName = styleNames[j]; - var styleValue = propValue[styleName]; - if (styleValue) { - checkStyleValue(styleValue); - projectionOptions.styleApplyer(domNode, styleName, styleValue); - } - } - } else if (propName === 'key') { - continue; - } else if (propValue === null || propValue === undefined) { - continue; - } else { - var type = typeof propValue; - if (type === 'function') { - if (propName.lastIndexOf('on', 0) === 0) { - if (eventHandlerInterceptor) { - propValue = eventHandlerInterceptor(propName, propValue, domNode, properties); // intercept eventhandlers - } - if (propName === 'oninput') { - (function () { - // record the evt.target.value, because IE and Edge sometimes do a requestAnimationFrame between changing value and running oninput - var oldPropValue = propValue; - propValue = function (evt) { - evt.target['oninput-value'] = evt.target.value; - // may be HTMLTextAreaElement as well - oldPropValue.apply(this, [evt]); - }; - }()); - } - domNode[propName] = propValue; - } - } else if (type === 'string' && propName !== 'value' && propName !== 'innerHTML') { - if (projectionOptions.namespace === NAMESPACE_SVG && propName === 'href') { - domNode.setAttributeNS(NAMESPACE_XLINK, propName, propValue); - } else { - domNode.setAttribute(propName, propValue); - } - } else { - domNode[propName] = propValue; - } - } - } - }; - var updateProperties = function (domNode, previousProperties, properties, projectionOptions) { - if (!properties) { - return; - } - var propertiesUpdated = false; - var propNames = Object.keys(properties); - var propCount = propNames.length; - for (var i = 0; i < propCount; i++) { - var propName = propNames[i]; - // assuming that properties will be nullified instead of missing is by design - var propValue = properties[propName]; - var previousValue = previousProperties[propName]; 
- if (propName === 'class') { - if (previousValue !== propValue) { - throw new Error('"class" property may not be updated. Use the "classes" property for conditional css classes.'); - } - } else if (propName === 'classes') { - var classList = domNode.classList; - var classNames = Object.keys(propValue); - var classNameCount = classNames.length; - for (var j = 0; j < classNameCount; j++) { - var className = classNames[j]; - var on = !!propValue[className]; - var previousOn = !!previousValue[className]; - if (on === previousOn) { - continue; - } - propertiesUpdated = true; - if (on) { - classList.add(className); - } else { - classList.remove(className); - } - } - } else if (propName === 'styles') { - var styleNames = Object.keys(propValue); - var styleCount = styleNames.length; - for (var j = 0; j < styleCount; j++) { - var styleName = styleNames[j]; - var newStyleValue = propValue[styleName]; - var oldStyleValue = previousValue[styleName]; - if (newStyleValue === oldStyleValue) { - continue; - } - propertiesUpdated = true; - if (newStyleValue) { - checkStyleValue(newStyleValue); - projectionOptions.styleApplyer(domNode, styleName, newStyleValue); - } else { - projectionOptions.styleApplyer(domNode, styleName, ''); - } - } - } else { - if (!propValue && typeof previousValue === 'string') { - propValue = ''; - } - if (propName === 'value') { - if (domNode[propName] !== propValue && domNode['oninput-value'] !== propValue) { - domNode[propName] = propValue; - // Reset the value, even if the virtual DOM did not change - domNode['oninput-value'] = undefined; - } - // else do not update the domNode, otherwise the cursor position would be changed - if (propValue !== previousValue) { - propertiesUpdated = true; - } - } else if (propValue !== previousValue) { - var type = typeof propValue; - if (type === 'function') { - throw new Error('Functions may not be updated on subsequent renders (property: ' + propName + '). 
Hint: declare event handler functions outside the render() function.'); - } - if (type === 'string' && propName !== 'innerHTML') { - if (projectionOptions.namespace === NAMESPACE_SVG && propName === 'href') { - domNode.setAttributeNS(NAMESPACE_XLINK, propName, propValue); - } else { - domNode.setAttribute(propName, propValue); - } - } else { - if (domNode[propName] !== propValue) { - domNode[propName] = propValue; - } - } - propertiesUpdated = true; - } - } - } - return propertiesUpdated; - }; - var findIndexOfChild = function (children, sameAs, start) { - if (sameAs.vnodeSelector !== '') { - // Never scan for text-nodes - for (var i = start; i < children.length; i++) { - if (same(children[i], sameAs)) { - return i; - } - } - } - return -1; - }; - var nodeAdded = function (vNode, transitions) { - if (vNode.properties) { - var enterAnimation = vNode.properties.enterAnimation; - if (enterAnimation) { - if (typeof enterAnimation === 'function') { - enterAnimation(vNode.domNode, vNode.properties); - } else { - transitions.enter(vNode.domNode, vNode.properties, enterAnimation); - } - } - } - }; - var nodeToRemove = function (vNode, transitions) { - var domNode = vNode.domNode; - if (vNode.properties) { - var exitAnimation = vNode.properties.exitAnimation; - if (exitAnimation) { - domNode.style.pointerEvents = 'none'; - var removeDomNode = function () { - if (domNode.parentNode) { - domNode.parentNode.removeChild(domNode); - } - }; - if (typeof exitAnimation === 'function') { - exitAnimation(domNode, removeDomNode, vNode.properties); - return; - } else { - transitions.exit(vNode.domNode, vNode.properties, exitAnimation, removeDomNode); - return; - } - } - } - if (domNode.parentNode) { - domNode.parentNode.removeChild(domNode); - } - }; - var checkDistinguishable = function (childNodes, indexToCheck, parentVNode, operation) { - var childNode = childNodes[indexToCheck]; - if (childNode.vnodeSelector === '') { - return; // Text nodes need not be distinguishable - } - var properties = childNode.properties; - var key = properties ? properties.key === undefined ? properties.bind : properties.key : undefined; - if (!key) { - for (var i = 0; i < childNodes.length; i++) { - if (i !== indexToCheck) { - var node = childNodes[i]; - if (same(node, childNode)) { - if (operation === 'added') { - throw new Error(parentVNode.vnodeSelector + ' had a ' + childNode.vnodeSelector + ' child ' + 'added, but there is now more than one. You must add unique key properties to make them distinguishable.'); - } else { - throw new Error(parentVNode.vnodeSelector + ' had a ' + childNode.vnodeSelector + ' child ' + 'removed, but there were more than one. You must add unique key properties to make them distinguishable.'); - } - } - } - } - } - }; - var createDom; - var updateDom; - var updateChildren = function (vnode, domNode, oldChildren, newChildren, projectionOptions) { - if (oldChildren === newChildren) { - return false; - } - oldChildren = oldChildren || emptyArray; - newChildren = newChildren || emptyArray; - var oldChildrenLength = oldChildren.length; - var newChildrenLength = newChildren.length; - var transitions = projectionOptions.transitions; - var oldIndex = 0; - var newIndex = 0; - var i; - var textUpdated = false; - while (newIndex < newChildrenLength) { - var oldChild = oldIndex < oldChildrenLength ? 
oldChildren[oldIndex] : undefined; - var newChild = newChildren[newIndex]; - if (oldChild !== undefined && same(oldChild, newChild)) { - textUpdated = updateDom(oldChild, newChild, projectionOptions) || textUpdated; - oldIndex++; - } else { - var findOldIndex = findIndexOfChild(oldChildren, newChild, oldIndex + 1); - if (findOldIndex >= 0) { - // Remove preceding missing children - for (i = oldIndex; i < findOldIndex; i++) { - nodeToRemove(oldChildren[i], transitions); - checkDistinguishable(oldChildren, i, vnode, 'removed'); - } - textUpdated = updateDom(oldChildren[findOldIndex], newChild, projectionOptions) || textUpdated; - oldIndex = findOldIndex + 1; - } else { - // New child - createDom(newChild, domNode, oldIndex < oldChildrenLength ? oldChildren[oldIndex].domNode : undefined, projectionOptions); - nodeAdded(newChild, transitions); - checkDistinguishable(newChildren, newIndex, vnode, 'added'); - } - } - newIndex++; - } - if (oldChildrenLength > oldIndex) { - // Remove child fragments - for (i = oldIndex; i < oldChildrenLength; i++) { - nodeToRemove(oldChildren[i], transitions); - checkDistinguishable(oldChildren, i, vnode, 'removed'); - } - } - return textUpdated; - }; - var addChildren = function (domNode, children, projectionOptions) { - if (!children) { - return; - } - for (var i = 0; i < children.length; i++) { - createDom(children[i], domNode, undefined, projectionOptions); - } - }; - var initPropertiesAndChildren = function (domNode, vnode, projectionOptions) { - addChildren(domNode, vnode.children, projectionOptions); - // children before properties, needed for value property of . - if (vnode.text) { - domNode.textContent = vnode.text; - } - setProperties(domNode, vnode.properties, projectionOptions); - if (vnode.properties && vnode.properties.afterCreate) { - vnode.properties.afterCreate(domNode, projectionOptions, vnode.vnodeSelector, vnode.properties, vnode.children); - } - }; - createDom = function (vnode, parentNode, insertBefore, projectionOptions) { - var domNode, i, c, start = 0, type, found; - var vnodeSelector = vnode.vnodeSelector; - if (vnodeSelector === '') { - domNode = vnode.domNode = document.createTextNode(vnode.text); - if (insertBefore !== undefined) { - parentNode.insertBefore(domNode, insertBefore); - } else { - parentNode.appendChild(domNode); - } - } else { - for (i = 0; i <= vnodeSelector.length; ++i) { - c = vnodeSelector.charAt(i); - if (i === vnodeSelector.length || c === '.' 
|| c === '#') { - type = vnodeSelector.charAt(start - 1); - found = vnodeSelector.slice(start, i); - if (type === '.') { - domNode.classList.add(found); - } else if (type === '#') { - domNode.id = found; - } else { - if (found === 'svg') { - projectionOptions = extend(projectionOptions, { namespace: NAMESPACE_SVG }); - } - if (projectionOptions.namespace !== undefined) { - domNode = vnode.domNode = document.createElementNS(projectionOptions.namespace, found); - } else { - domNode = vnode.domNode = document.createElement(found); - } - if (insertBefore !== undefined) { - parentNode.insertBefore(domNode, insertBefore); - } else { - parentNode.appendChild(domNode); - } - } - start = i + 1; - } - } - initPropertiesAndChildren(domNode, vnode, projectionOptions); - } - }; - updateDom = function (previous, vnode, projectionOptions) { - var domNode = previous.domNode; - var textUpdated = false; - if (previous === vnode) { - return false; // By contract, VNode objects may not be modified anymore after passing them to maquette - } - var updated = false; - if (vnode.vnodeSelector === '') { - if (vnode.text !== previous.text) { - var newVNode = document.createTextNode(vnode.text); - domNode.parentNode.replaceChild(newVNode, domNode); - vnode.domNode = newVNode; - textUpdated = true; - return textUpdated; - } - } else { - if (vnode.vnodeSelector.lastIndexOf('svg', 0) === 0) { - projectionOptions = extend(projectionOptions, { namespace: NAMESPACE_SVG }); - } - if (previous.text !== vnode.text) { - updated = true; - if (vnode.text === undefined) { - domNode.removeChild(domNode.firstChild); // the only textnode presumably - } else { - domNode.textContent = vnode.text; - } - } - updated = updateChildren(vnode, domNode, previous.children, vnode.children, projectionOptions) || updated; - updated = updateProperties(domNode, previous.properties, vnode.properties, projectionOptions) || updated; - if (vnode.properties && vnode.properties.afterUpdate) { - vnode.properties.afterUpdate(domNode, projectionOptions, vnode.vnodeSelector, vnode.properties, vnode.children); - } - } - if (updated && vnode.properties && vnode.properties.updateAnimation) { - vnode.properties.updateAnimation(domNode, vnode.properties, previous.properties); - } - vnode.domNode = previous.domNode; - return textUpdated; - }; - var createProjection = function (vnode, projectionOptions) { - return { - update: function (updatedVnode) { - if (vnode.vnodeSelector !== updatedVnode.vnodeSelector) { - throw new Error('The selector for the root VNode may not be changed. (consider using dom.merge and add one extra level to the virtual DOM)'); - } - updateDom(vnode, updatedVnode, projectionOptions); - vnode = updatedVnode; - }, - domNode: vnode.domNode - }; - }; - ; - // The other two parameters are not added here, because the Typescript compiler creates surrogate code for desctructuring 'children'. 
- exports.h = function (selector) { - var properties = arguments[1]; - if (typeof selector !== 'string') { - throw new Error(); - } - var childIndex = 1; - if (properties && !properties.hasOwnProperty('vnodeSelector') && !Array.isArray(properties) && typeof properties === 'object') { - childIndex = 2; - } else { - // Optional properties argument was omitted - properties = undefined; - } - var text = undefined; - var children = undefined; - var argsLength = arguments.length; - // Recognize a common special case where there is only a single text node - if (argsLength === childIndex + 1) { - var onlyChild = arguments[childIndex]; - if (typeof onlyChild === 'string') { - text = onlyChild; - } else if (onlyChild !== undefined && onlyChild.length === 1 && typeof onlyChild[0] === 'string') { - text = onlyChild[0]; - } - } - if (text === undefined) { - children = []; - for (; childIndex < arguments.length; childIndex++) { - var child = arguments[childIndex]; - if (child === null || child === undefined) { - continue; - } else if (Array.isArray(child)) { - appendChildren(selector, child, children); - } else if (child.hasOwnProperty('vnodeSelector')) { - children.push(child); - } else { - children.push(toTextVNode(child)); - } - } - } - return { - vnodeSelector: selector, - properties: properties, - children: children, - text: text === '' ? undefined : text, - domNode: null - }; - }; - /** - * Contains simple low-level utility functions to manipulate the real DOM. - */ - exports.dom = { - /** - * Creates a real DOM tree from `vnode`. The [[Projection]] object returned will contain the resulting DOM Node in - * its [[Projection.domNode|domNode]] property. - * This is a low-level method. Users wil typically use a [[Projector]] instead. - * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] - * objects may only be rendered once. - * @param projectionOptions - Options to be used to create and update the projection. - * @returns The [[Projection]] which also contains the DOM Node that was created. - */ - create: function (vnode, projectionOptions) { - projectionOptions = applyDefaultProjectionOptions(projectionOptions); - createDom(vnode, document.createElement('div'), undefined, projectionOptions); - return createProjection(vnode, projectionOptions); - }, - /** - * Appends a new childnode to the DOM which is generated from a [[VNode]]. - * This is a low-level method. Users wil typically use a [[Projector]] instead. - * @param parentNode - The parent node for the new childNode. - * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] - * objects may only be rendered once. - * @param projectionOptions - Options to be used to create and update the [[Projection]]. - * @returns The [[Projection]] that was created. - */ - append: function (parentNode, vnode, projectionOptions) { - projectionOptions = applyDefaultProjectionOptions(projectionOptions); - createDom(vnode, parentNode, undefined, projectionOptions); - return createProjection(vnode, projectionOptions); - }, - /** - * Inserts a new DOM node which is generated from a [[VNode]]. - * This is a low-level method. Users wil typically use a [[Projector]] instead. - * @param beforeNode - The node that the DOM Node is inserted before. - * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. - * NOTE: [[VNode]] objects may only be rendered once. 
- * @param projectionOptions - Options to be used to create and update the projection, see [[createProjector]]. - * @returns The [[Projection]] that was created. - */ - insertBefore: function (beforeNode, vnode, projectionOptions) { - projectionOptions = applyDefaultProjectionOptions(projectionOptions); - createDom(vnode, beforeNode.parentNode, beforeNode, projectionOptions); - return createProjection(vnode, projectionOptions); - }, - /** - * Merges a new DOM node which is generated from a [[VNode]] with an existing DOM Node. - * This means that the virtual DOM and the real DOM will have one overlapping element. - * Therefore the selector for the root [[VNode]] will be ignored, but its properties and children will be applied to the Element provided. - * This is a low-level method. Users wil typically use a [[Projector]] instead. - * @param domNode - The existing element to adopt as the root of the new virtual DOM. Existing attributes and childnodes are preserved. - * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] objects - * may only be rendered once. - * @param projectionOptions - Options to be used to create and update the projection, see [[createProjector]]. - * @returns The [[Projection]] that was created. - */ - merge: function (element, vnode, projectionOptions) { - projectionOptions = applyDefaultProjectionOptions(projectionOptions); - vnode.domNode = element; - initPropertiesAndChildren(element, vnode, projectionOptions); - return createProjection(vnode, projectionOptions); - } - }; - /** - * Creates a [[CalculationCache]] object, useful for caching [[VNode]] trees. - * In practice, caching of [[VNode]] trees is not needed, because achieving 60 frames per second is almost never a problem. - * For more information, see [[CalculationCache]]. - * - * @param The type of the value that is cached. - */ - exports.createCache = function () { - var cachedInputs = undefined; - var cachedOutcome = undefined; - var result = { - invalidate: function () { - cachedOutcome = undefined; - cachedInputs = undefined; - }, - result: function (inputs, calculation) { - if (cachedInputs) { - for (var i = 0; i < inputs.length; i++) { - if (cachedInputs[i] !== inputs[i]) { - cachedOutcome = undefined; - } - } - } - if (!cachedOutcome) { - cachedOutcome = calculation(); - cachedInputs = inputs; - } - return cachedOutcome; - } - }; - return result; - }; - /** - * Creates a {@link Mapping} instance that keeps an array of result objects synchronized with an array of source objects. - * See {@link http://maquettejs.org/docs/arrays.html|Working with arrays}. - * - * @param The type of source items. A database-record for instance. - * @param The type of target items. A [[Component]] for instance. - * @param getSourceKey `function(source)` that must return a key to identify each source object. The result must either be a string or a number. - * @param createResult `function(source, index)` that must create a new result object from a given source. This function is identical - * to the `callback` argument in `Array.map(callback)`. - * @param updateResult `function(source, target, index)` that updates a result to an updated source. 
- */ - exports.createMapping = function (getSourceKey, createResult, updateResult) { - var keys = []; - var results = []; - return { - results: results, - map: function (newSources) { - var newKeys = newSources.map(getSourceKey); - var oldTargets = results.slice(); - var oldIndex = 0; - for (var i = 0; i < newSources.length; i++) { - var source = newSources[i]; - var sourceKey = newKeys[i]; - if (sourceKey === keys[oldIndex]) { - results[i] = oldTargets[oldIndex]; - updateResult(source, oldTargets[oldIndex], i); - oldIndex++; - } else { - var found = false; - for (var j = 1; j < keys.length; j++) { - var searchIndex = (oldIndex + j) % keys.length; - if (keys[searchIndex] === sourceKey) { - results[i] = oldTargets[searchIndex]; - updateResult(newSources[i], oldTargets[searchIndex], i); - oldIndex = searchIndex + 1; - found = true; - break; - } - } - if (!found) { - results[i] = createResult(source, i); - } - } - } - results.length = newSources.length; - keys = newKeys; - } - }; - }; - /** - * Creates a [[Projector]] instance using the provided projectionOptions. - * - * For more information, see [[Projector]]. - * - * @param projectionOptions Options that influence how the DOM is rendered and updated. - */ - exports.createProjector = function (projectorOptions) { - var projector; - var projectionOptions = applyDefaultProjectionOptions(projectorOptions); - projectionOptions.eventHandlerInterceptor = function (propertyName, eventHandler, domNode, properties) { - return function () { - // intercept function calls (event handlers) to do a render afterwards. - projector.scheduleRender(); - return eventHandler.apply(properties.bind || this, arguments); - }; - }; - var renderCompleted = true; - var scheduled; - var stopped = false; - var projections = []; - var renderFunctions = []; - // matches the projections array - var doRender = function () { - scheduled = undefined; - if (!renderCompleted) { - return; // The last render threw an error, it should be logged in the browser console. 
- } - renderCompleted = false; - for (var i = 0; i < projections.length; i++) { - var updatedVnode = renderFunctions[i](); - projections[i].update(updatedVnode); - } - renderCompleted = true; - }; - projector = { - scheduleRender: function () { - if (!scheduled && !stopped) { - scheduled = requestAnimationFrame(doRender); - } - }, - stop: function () { - if (scheduled) { - cancelAnimationFrame(scheduled); - scheduled = undefined; - } - stopped = true; - }, - resume: function () { - stopped = false; - renderCompleted = true; - projector.scheduleRender(); - }, - append: function (parentNode, renderMaquetteFunction) { - projections.push(exports.dom.append(parentNode, renderMaquetteFunction(), projectionOptions)); - renderFunctions.push(renderMaquetteFunction); - }, - insertBefore: function (beforeNode, renderMaquetteFunction) { - projections.push(exports.dom.insertBefore(beforeNode, renderMaquetteFunction(), projectionOptions)); - renderFunctions.push(renderMaquetteFunction); - }, - merge: function (domNode, renderMaquetteFunction) { - projections.push(exports.dom.merge(domNode, renderMaquetteFunction(), projectionOptions)); - renderFunctions.push(renderMaquetteFunction); - }, - replace: function (domNode, renderMaquetteFunction) { - var vnode = renderMaquetteFunction(); - createDom(vnode, domNode.parentNode, domNode, projectionOptions); - domNode.parentNode.removeChild(domNode); - projections.push(createProjection(vnode, projectionOptions)); - renderFunctions.push(renderMaquetteFunction); - }, - detach: function (renderMaquetteFunction) { - for (var i = 0; i < renderFunctions.length; i++) { - if (renderFunctions[i] === renderMaquetteFunction) { - renderFunctions.splice(i, 1); - return projections.splice(i, 1)[0]; - } - } - throw new Error('renderMaquetteFunction was not found'); - } - }; - return projector; - }; -})); diff --git a/plugins/UiConfig/media/js/utils/Animation.coffee b/plugins/UiConfig/media/js/utils/Animation.coffee deleted file mode 100644 index 271b88c1..00000000 --- a/plugins/UiConfig/media/js/utils/Animation.coffee +++ /dev/null @@ -1,138 +0,0 @@ -class Animation - slideDown: (elem, props) -> - if elem.offsetTop > 2000 - return - - h = elem.offsetHeight - cstyle = window.getComputedStyle(elem) - margin_top = cstyle.marginTop - margin_bottom = cstyle.marginBottom - padding_top = cstyle.paddingTop - padding_bottom = cstyle.paddingBottom - transition = cstyle.transition - - elem.style.boxSizing = "border-box" - elem.style.overflow = "hidden" - elem.style.transform = "scale(0.6)" - elem.style.opacity = "0" - elem.style.height = "0px" - elem.style.marginTop = "0px" - elem.style.marginBottom = "0px" - elem.style.paddingTop = "0px" - elem.style.paddingBottom = "0px" - elem.style.transition = "none" - - setTimeout (-> - elem.className += " animate-inout" - elem.style.height = h+"px" - elem.style.transform = "scale(1)" - elem.style.opacity = "1" - elem.style.marginTop = margin_top - elem.style.marginBottom = margin_bottom - elem.style.paddingTop = padding_top - elem.style.paddingBottom = padding_bottom - ), 1 - - elem.addEventListener "transitionend", -> - elem.classList.remove("animate-inout") - elem.style.transition = elem.style.transform = elem.style.opacity = elem.style.height = null - elem.style.boxSizing = elem.style.marginTop = elem.style.marginBottom = null - elem.style.paddingTop = elem.style.paddingBottom = elem.style.overflow = null - elem.removeEventListener "transitionend", arguments.callee, false - - - slideUp: (elem, remove_func, props) -> - if elem.offsetTop > 1000 - 
return remove_func() - - elem.className += " animate-back" - elem.style.boxSizing = "border-box" - elem.style.height = elem.offsetHeight+"px" - elem.style.overflow = "hidden" - elem.style.transform = "scale(1)" - elem.style.opacity = "1" - elem.style.pointerEvents = "none" - setTimeout (-> - elem.style.height = "0px" - elem.style.marginTop = "0px" - elem.style.marginBottom = "0px" - elem.style.paddingTop = "0px" - elem.style.paddingBottom = "0px" - elem.style.transform = "scale(0.8)" - elem.style.borderTopWidth = "0px" - elem.style.borderBottomWidth = "0px" - elem.style.opacity = "0" - ), 1 - elem.addEventListener "transitionend", (e) -> - if e.propertyName == "opacity" or e.elapsedTime >= 0.6 - elem.removeEventListener "transitionend", arguments.callee, false - remove_func() - - - slideUpInout: (elem, remove_func, props) -> - elem.className += " animate-inout" - elem.style.boxSizing = "border-box" - elem.style.height = elem.offsetHeight+"px" - elem.style.overflow = "hidden" - elem.style.transform = "scale(1)" - elem.style.opacity = "1" - elem.style.pointerEvents = "none" - setTimeout (-> - elem.style.height = "0px" - elem.style.marginTop = "0px" - elem.style.marginBottom = "0px" - elem.style.paddingTop = "0px" - elem.style.paddingBottom = "0px" - elem.style.transform = "scale(0.8)" - elem.style.borderTopWidth = "0px" - elem.style.borderBottomWidth = "0px" - elem.style.opacity = "0" - ), 1 - elem.addEventListener "transitionend", (e) -> - if e.propertyName == "opacity" or e.elapsedTime >= 0.6 - elem.removeEventListener "transitionend", arguments.callee, false - remove_func() - - - showRight: (elem, props) -> - elem.className += " animate" - elem.style.opacity = 0 - elem.style.transform = "TranslateX(-20px) Scale(1.01)" - setTimeout (-> - elem.style.opacity = 1 - elem.style.transform = "TranslateX(0px) Scale(1)" - ), 1 - elem.addEventListener "transitionend", -> - elem.classList.remove("animate") - elem.style.transform = elem.style.opacity = null - - - show: (elem, props) -> - delay = arguments[arguments.length-2]?.delay*1000 or 1 - elem.style.opacity = 0 - setTimeout (-> - elem.className += " animate" - ), 1 - setTimeout (-> - elem.style.opacity = 1 - ), delay - elem.addEventListener "transitionend", -> - elem.classList.remove("animate") - elem.style.opacity = null - elem.removeEventListener "transitionend", arguments.callee, false - - hide: (elem, remove_func, props) -> - delay = arguments[arguments.length-2]?.delay*1000 or 1 - elem.className += " animate" - setTimeout (-> - elem.style.opacity = 0 - ), delay - elem.addEventListener "transitionend", (e) -> - if e.propertyName == "opacity" - remove_func() - - addVisibleClass: (elem, props) -> - setTimeout -> - elem.classList.add("visible") - -window.Animation = new Animation() \ No newline at end of file diff --git a/plugins/UiConfig/media/js/utils/Dollar.coffee b/plugins/UiConfig/media/js/utils/Dollar.coffee deleted file mode 100644 index 7f19f551..00000000 --- a/plugins/UiConfig/media/js/utils/Dollar.coffee +++ /dev/null @@ -1,3 +0,0 @@ -window.$ = (selector) -> - if selector.startsWith("#") - return document.getElementById(selector.replace("#", "")) diff --git a/plugins/UiConfig/media/js/utils/ZeroFrame.coffee b/plugins/UiConfig/media/js/utils/ZeroFrame.coffee deleted file mode 100644 index 11512d16..00000000 --- a/plugins/UiConfig/media/js/utils/ZeroFrame.coffee +++ /dev/null @@ -1,85 +0,0 @@ -class ZeroFrame extends Class - constructor: (url) -> - @url = url - @waiting_cb = {} - @wrapper_nonce = 
document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1") - @connect() - @next_message_id = 1 - @history_state = {} - @init() - - - init: -> - @ - - - connect: -> - @target = window.parent - window.addEventListener("message", @onMessage, false) - @cmd("innerReady") - - # Save scrollTop - window.addEventListener "beforeunload", (e) => - @log "save scrollTop", window.pageYOffset - @history_state["scrollTop"] = window.pageYOffset - @cmd "wrapperReplaceState", [@history_state, null] - - # Restore scrollTop - @cmd "wrapperGetState", [], (state) => - @history_state = state if state? - @log "restore scrollTop", state, window.pageYOffset - if window.pageYOffset == 0 and state - window.scroll(window.pageXOffset, state.scrollTop) - - - onMessage: (e) => - message = e.data - cmd = message.cmd - if cmd == "response" - if @waiting_cb[message.to]? - @waiting_cb[message.to](message.result) - else - @log "Websocket callback not found:", message - else if cmd == "wrapperReady" # Wrapper inited later - @cmd("innerReady") - else if cmd == "ping" - @response message.id, "pong" - else if cmd == "wrapperOpenedWebsocket" - @onOpenWebsocket() - else if cmd == "wrapperClosedWebsocket" - @onCloseWebsocket() - else - @onRequest cmd, message.params - - - onRequest: (cmd, message) => - @log "Unknown request", message - - - response: (to, result) -> - @send {"cmd": "response", "to": to, "result": result} - - - cmd: (cmd, params={}, cb=null) -> - @send {"cmd": cmd, "params": params}, cb - - - send: (message, cb=null) -> - message.wrapper_nonce = @wrapper_nonce - message.id = @next_message_id - @next_message_id += 1 - @target.postMessage(message, "*") - if cb - @waiting_cb[message.id] = cb - - - onOpenWebsocket: => - @log "Websocket open" - - - onCloseWebsocket: => - @log "Websocket close" - - - -window.ZeroFrame = ZeroFrame diff --git a/plugins/Zeroname/README.md b/plugins/Zeroname/README.md deleted file mode 100644 index 8a306789..00000000 --- a/plugins/Zeroname/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# ZeroName - -Zeroname plugin to connect Namecoin and register all the .bit domain name. - -## Start - -You can create your own Zeroname. - -### Namecoin node - -You need to run a namecoin node. - -[Namecoin](https://namecoin.org/download/) - -You will need to start it as a RPC server. - -Example of `~/.namecoin/namecoin.conf` minimal setup: -``` -daemon=1 -rpcuser=your-name -rpcpassword=your-password -rpcport=8336 -server=1 -txindex=1 -``` - -Don't forget to change the `rpcuser` value and `rpcpassword` value! - -You can start your node : `./namecoind` - -### Create a Zeroname site - -You will also need to create a site `python zeronet.py createSite` and regitser the info. - -In the site you will need to create a file `./data//data/names.json` with this is it: -``` -{} -``` - -### `zeroname_config.json` file - -In `~/.namecoin/zeroname_config.json` -``` -{ - "lastprocessed": 223910, - "zeronet_path": "/root/ZeroNet", # Update with your path - "privatekey": "", # Update with your private key of your site - "site": "" # Update with the address of your site -} -``` - -### Run updater - -You can now run the script : `updater/zeroname_updater.py` and wait until it is fully sync (it might take a while). 
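To make the steps above concrete, here is a minimal, hypothetical sketch of the lookup that the (now removed) SiteManagerPlugin in the next hunk performs against the `names.json` file this README tells you to create. It is an illustration only, not the plugin code: the `names_path` default and the example domain are placeholders.

```python
# Hypothetical sketch: resolve a .bit name through the names.json written by
# zeroname_updater.py (keys look like "example.bit" / "blog.example.bit").
import json
import re

def resolve_bit_domain(domain, names_path="data/<zeroname-site>/data/names.json"):
    # Same .bit pattern the SiteManagerPlugin uses to recognize domains
    if not re.match(r"(.*?)([A-Za-z0-9_-]+\.bit)$", domain):
        return None  # not a .bit name
    with open(names_path) as f:
        names = json.load(f)  # starts out as {} until the updater fills it
    return names.get(domain.lower())  # site address or None
```

With the empty `{}` file created in the step above this simply returns `None`; once the updater has processed the blockchain, registered names resolve to their site addresses.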
diff --git a/plugins/Zeroname/SiteManagerPlugin.py b/plugins/Zeroname/SiteManagerPlugin.py deleted file mode 100644 index 40088f12..00000000 --- a/plugins/Zeroname/SiteManagerPlugin.py +++ /dev/null @@ -1,84 +0,0 @@ -import logging -import re -import time - -from Config import config -from Plugin import PluginManager - -allow_reload = False # No reload supported - -log = logging.getLogger("ZeronamePlugin") - - -@PluginManager.registerTo("SiteManager") -class SiteManagerPlugin(object): - site_zeroname = None - db_domains = None - db_domains_modified = None - - def load(self, *args, **kwargs): - super(SiteManagerPlugin, self).load(*args, **kwargs) - if not self.get(config.bit_resolver): - self.need(config.bit_resolver) # Need ZeroName site - - # Checks if it's a valid address - def isAddress(self, address): - return self.isBitDomain(address) or super(SiteManagerPlugin, self).isAddress(address) - - # Return: True if the address is domain - def isDomain(self, address): - return self.isBitDomain(address) or super(SiteManagerPlugin, self).isDomain(address) - - # Return: True if the address is .bit domain - def isBitDomain(self, address): - return re.match(r"(.*?)([A-Za-z0-9_-]+\.bit)$", address) - - # Resolve domain - # Return: The address or None - def resolveDomain(self, domain): - domain = domain.lower() - if not self.site_zeroname: - self.site_zeroname = self.need(config.bit_resolver) - - site_zeroname_modified = self.site_zeroname.content_manager.contents.get("content.json", {}).get("modified", 0) - if not self.db_domains or self.db_domains_modified != site_zeroname_modified: - self.site_zeroname.needFile("data/names.json", priority=10) - s = time.time() - self.db_domains = self.site_zeroname.storage.loadJson("data/names.json") - log.debug( - "Domain db with %s entries loaded in %.3fs (modification: %s -> %s)" % - (len(self.db_domains), time.time() - s, self.db_domains_modified, site_zeroname_modified) - ) - self.db_domains_modified = site_zeroname_modified - return self.db_domains.get(domain) - - # Return or create site and start download site files - # Return: Site or None if dns resolve failed - def need(self, address, *args, **kwargs): - if self.isBitDomain(address): # Its looks like a domain - address_resolved = self.resolveDomain(address) - if address_resolved: - address = address_resolved - else: - return None - - return super(SiteManagerPlugin, self).need(address, *args, **kwargs) - - # Return: Site object or None if not found - def get(self, address): - if not self.loaded: # Not loaded yet - self.load() - if self.isBitDomain(address): # Its looks like a domain - address_resolved = self.resolveDomain(address) - if address_resolved: # Domain found - site = self.sites.get(address_resolved) - if site: - site_domain = site.settings.get("domain") - if site_domain != address: - site.settings["domain"] = address - else: # Domain not found - site = self.sites.get(address) - - else: # Access by site address - site = super(SiteManagerPlugin, self).get(address) - return site diff --git a/plugins/Zeroname/UiRequestPlugin.py b/plugins/Zeroname/UiRequestPlugin.py deleted file mode 100644 index b0230524..00000000 --- a/plugins/Zeroname/UiRequestPlugin.py +++ /dev/null @@ -1,30 +0,0 @@ -import re - -from Plugin import PluginManager - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - - def __init__(self, *args, **kwargs): - from Site import SiteManager - self.site_manager = SiteManager.site_manager - super(UiRequestPlugin, self).__init__(*args, **kwargs) - - # Media request 
- def actionSiteMedia(self, path, **kwargs): - match = re.match(r"/media/(?P<address>
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) - if match: # Its a valid domain, resolve first - domain = match.group("address") - address = self.site_manager.resolveDomain(domain) - if address: - path = "/media/" + address + match.group("inner_path") - return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs) # Get the wrapper frame output - -@PluginManager.registerTo("ConfigPlugin") -class ConfigPlugin(object): - def createArguments(self): - group = self.parser.add_argument_group("Zeroname plugin") - group.add_argument('--bit_resolver', help='ZeroNet site to resolve .bit domains', default="1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F", metavar="address") - - return super(ConfigPlugin, self).createArguments() diff --git a/plugins/Zeroname/__init__.py b/plugins/Zeroname/__init__.py deleted file mode 100644 index 889802db..00000000 --- a/plugins/Zeroname/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -import UiRequestPlugin -import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/Zeroname/updater/zeroname_updater.py b/plugins/Zeroname/updater/zeroname_updater.py deleted file mode 100644 index a8eedd13..00000000 --- a/plugins/Zeroname/updater/zeroname_updater.py +++ /dev/null @@ -1,246 +0,0 @@ -import time -import json -import os -import sys -import re -import socket - -from subprocess import call -from bitcoinrpc.authproxy import AuthServiceProxy - - -def publish(): - print "* Signing and Publishing..." - call(" ".join(command_sign_publish), shell=True) - - -def processNameOp(domain, value, test=False): - if not value.strip().startswith("{"): - return False - try: - data = json.loads(value) - except Exception, err: - print "Json load error: %s" % err - return False - if "zeronet" not in data and "map" not in data: - # Namecoin standard use {"map": { "blog": {"zeronet": "1D..."} }} - print "No zeronet and no map in ", data.keys() - return False - if "map" in data: - # If subdomains using the Namecoin standard is present, just re-write in the Zeronet way - # and call the function again - data_map = data["map"] - new_value = {} - for subdomain in data_map: - if "zeronet" in data_map[subdomain]: - new_value[subdomain] = data_map[subdomain]["zeronet"] - if "zeronet" in data and isinstance(data["zeronet"], basestring): - # { - # "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9", - # .... 
- # } - new_value[""] = data["zeronet"] - if len(new_value) > 0: - return processNameOp(domain, json.dumps({"zeronet": new_value}), test) - else: - return False - if "zeronet" in data and isinstance(data["zeronet"], basestring): - # { - # "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9" - # } is valid - return processNameOp(domain, json.dumps({"zeronet": { "": data["zeronet"]}}), test) - if not isinstance(data["zeronet"], dict): - print "Not dict: ", data["zeronet"] - return False - if not re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", domain): - print "Invalid domain: ", domain - return False - - if test: - return True - - if "slave" in sys.argv: - print "Waiting for master update arrive" - time.sleep(30) # Wait 30 sec to allow master updater - - # Note: Requires the file data/names.json to exist and contain "{}" to work - names_raw = open(names_path, "rb").read() - names = json.loads(names_raw) - for subdomain, address in data["zeronet"].items(): - subdomain = subdomain.lower() - address = re.sub("[^A-Za-z0-9]", "", address) - print subdomain, domain, "->", address - if subdomain: - if re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", subdomain): - names["%s.%s.bit" % (subdomain, domain)] = address - else: - print "Invalid subdomain:", domain, subdomain - else: - names["%s.bit" % domain] = address - - new_names_raw = json.dumps(names, indent=2, sort_keys=True) - if new_names_raw != names_raw: - open(names_path, "wb").write(new_names_raw) - print "-", domain, "Changed" - return True - else: - print "-", domain, "Not changed" - return False - - -def processBlock(block_id, test=False): - print "Processing block #%s..." % block_id - s = time.time() - block_hash = rpc.getblockhash(block_id) - block = rpc.getblock(block_hash) - - print "Checking %s tx" % len(block["tx"]) - updated = 0 - for tx in block["tx"]: - try: - transaction = rpc.getrawtransaction(tx, 1) - for vout in transaction.get("vout", []): - if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]: - name_op = vout["scriptPubKey"]["nameOp"] - updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"], test) - except Exception, err: - print "Error processing tx #%s %s" % (tx, err) - print "Done in %.3fs (updated %s)." % (time.time() - s, updated) - return updated - -# Connecting to RPC -def initRpc(config): - """Initialize Namecoin RPC""" - rpc_data = { - 'connect': '127.0.0.1', - 'port': '8336', - 'user': 'PLACEHOLDER', - 'password': 'PLACEHOLDER', - 'clienttimeout': '900' - } - try: - fptr = open(config, 'r') - lines = fptr.readlines() - fptr.close() - except: - return None # Or take some other appropriate action - - for line in lines: - if not line.startswith('rpc'): - continue - key_val = line.split(None, 1)[0] - (key, val) = key_val.split('=', 1) - if not key or not val: - continue - rpc_data[key[3:]] = val - - url = 'http://%(user)s:%(password)s@%(connect)s:%(port)s' % rpc_data - - return url, int(rpc_data['clienttimeout']) - -# Loading config... 
- -# Check whether platform is on windows or linux -# On linux namecoin is installed under ~/.namecoin, while on on windows it is in %appdata%/Namecoin - -if sys.platform == "win32": - namecoin_location = os.getenv('APPDATA') + "/Namecoin/" -else: - namecoin_location = os.path.expanduser("~/.namecoin/") - -config_path = namecoin_location + 'zeroname_config.json' -if not os.path.isfile(config_path): # Create sample config - open(config_path, "w").write( - json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet', 'privatekey': '', 'lastprocessed': 223910}, indent=2) - ) - print "* Example config written to %s" % config_path - sys.exit(0) - -config = json.load(open(config_path)) -names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"]) -os.chdir(config["zeronet_path"]) # Change working dir - tells script where Zeronet install is. - -# Parameters to sign and publish -command_sign_publish = [sys.executable, "zeronet.py", "siteSign", config["site"], config["privatekey"], "--publish"] -if sys.platform == 'win32': - command_sign_publish = ['"%s"' % param for param in command_sign_publish] - -# Initialize rpc connection -rpc_auth, rpc_timeout = initRpc(namecoin_location + "namecoin.conf") -rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) - -node_version = rpc.getnetworkinfo()['version'] - -while 1: - try: - time.sleep(1) - if node_version < 160000 : - last_block = int(rpc.getinfo()["blocks"]) - else: - last_block = int(rpc.getblockchaininfo()["blocks"]) - break # Connection succeeded - except socket.timeout: # Timeout - print ".", - sys.stdout.flush() - except Exception, err: - print "Exception", err.__class__, err - time.sleep(5) - rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) - -if not config["lastprocessed"]: # First startup: Start processing from last block - config["lastprocessed"] = last_block - - -print "- Testing domain parsing..." -assert processBlock(223911, test=True) # Testing zeronetwork.bit -assert processBlock(227052, test=True) # Testing brainwallets.bit -assert not processBlock(236824, test=True) # Utf8 domain name (invalid should skip) -assert not processBlock(236752, test=True) # Uppercase domain (invalid should skip) -assert processBlock(236870, test=True) # Encoded domain (should pass) -assert processBlock(438317, test=True) # Testing namecoin standard artifaxradio.bit (should pass) -# sys.exit(0) - -print "- Parsing skipped blocks..." 
-should_publish = False -for block_id in range(config["lastprocessed"], last_block + 1): - if processBlock(block_id): - should_publish = True -config["lastprocessed"] = last_block - -if should_publish: - publish() - -while 1: - print "- Waiting for new block" - sys.stdout.flush() - while 1: - try: - time.sleep(1) - if node_version < 160000 : - rpc.waitforblock() - else: - rpc.waitfornewblock() - print "Found" - break # Block found - except socket.timeout: # Timeout - print ".", - sys.stdout.flush() - except Exception, err: - print "Exception", err.__class__, err - time.sleep(5) - rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) - - if node_version < 160000 : - last_block = int(rpc.getinfo()["blocks"]) - else: - last_block = int(rpc.getblockchaininfo()["blocks"]) - should_publish = False - for block_id in range(config["lastprocessed"] + 1, last_block + 1): - if processBlock(block_id): - should_publish = True - - config["lastprocessed"] = last_block - open(config_path, "w").write(json.dumps(config, indent=2)) - - if should_publish: - publish() diff --git a/plugins/disabled-Bootstrapper/BootstrapperDb.py b/plugins/disabled-Bootstrapper/BootstrapperDb.py deleted file mode 100644 index 08005150..00000000 --- a/plugins/disabled-Bootstrapper/BootstrapperDb.py +++ /dev/null @@ -1,156 +0,0 @@ -import time -import re - -import gevent - -from Config import config -from Db import Db -from util import helper - - -class BootstrapperDb(Db): - def __init__(self): - self.version = 7 - self.hash_ids = {} # hash -> id cache - super(BootstrapperDb, self).__init__({"db_name": "Bootstrapper"}, "%s/bootstrapper.db" % config.data_dir) - self.foreign_keys = True - self.checkTables() - self.updateHashCache() - gevent.spawn(self.cleanup) - - def cleanup(self): - while 1: - time.sleep(4 * 60) - timeout = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time() - 60 * 40)) - self.execute("DELETE FROM peer WHERE date_announced < ?", [timeout]) - - def updateHashCache(self): - res = self.execute("SELECT * FROM hash") - self.hash_ids = {str(row["hash"]): row["hash_id"] for row in res} - self.log.debug("Loaded %s hash_ids" % len(self.hash_ids)) - - def checkTables(self): - version = int(self.execute("PRAGMA user_version").fetchone()[0]) - self.log.debug("Db version: %s, needed: %s" % (version, self.version)) - if version < self.version: - self.createTables() - else: - self.execute("VACUUM") - - def createTables(self): - # Delete all tables - self.execute("PRAGMA writable_schema = 1") - self.execute("DELETE FROM sqlite_master WHERE type IN ('table', 'index', 'trigger')") - self.execute("PRAGMA writable_schema = 0") - self.execute("VACUUM") - self.execute("PRAGMA INTEGRITY_CHECK") - # Create new tables - self.execute(""" - CREATE TABLE peer ( - peer_id INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE, - type TEXT, - address TEXT, - port INTEGER NOT NULL, - date_added DATETIME DEFAULT (CURRENT_TIMESTAMP), - date_announced DATETIME DEFAULT (CURRENT_TIMESTAMP) - ); - """) - self.execute("CREATE UNIQUE INDEX peer_key ON peer (address, port);") - - self.execute(""" - CREATE TABLE peer_to_hash ( - peer_to_hash_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE NOT NULL, - peer_id INTEGER REFERENCES peer (peer_id) ON DELETE CASCADE, - hash_id INTEGER REFERENCES hash (hash_id) - ); - """) - self.execute("CREATE INDEX peer_id ON peer_to_hash (peer_id);") - self.execute("CREATE INDEX hash_id ON peer_to_hash (hash_id);") - - self.execute(""" - CREATE TABLE hash ( - hash_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE NOT NULL, - 
hash BLOB UNIQUE NOT NULL, - date_added DATETIME DEFAULT (CURRENT_TIMESTAMP) - ); - """) - self.execute("PRAGMA user_version = %s" % self.version) - - def getHashId(self, hash): - if hash not in self.hash_ids: - self.log.debug("New hash: %s" % repr(hash)) - self.execute("INSERT OR IGNORE INTO hash ?", {"hash": buffer(hash)}) - self.hash_ids[hash] = self.cur.cursor.lastrowid - return self.hash_ids[hash] - - def peerAnnounce(self, ip_type, address, port=None, hashes=[], onion_signed=False, delete_missing_hashes=False): - hashes_ids_announced = [] - for hash in hashes: - hashes_ids_announced.append(self.getHashId(hash)) - - # Check user - res = self.execute("SELECT peer_id FROM peer WHERE ? LIMIT 1", {"address": address, "port": port}) - - user_row = res.fetchone() - now = time.strftime("%Y-%m-%d %H:%M:%S") - if user_row: - peer_id = user_row["peer_id"] - self.execute("UPDATE peer SET date_announced = ? WHERE peer_id = ?", (now, peer_id)) - else: - self.log.debug("New peer: %s signed: %s" % (address, onion_signed)) - if ip_type == "onion" and not onion_signed: - return len(hashes) - self.execute("INSERT INTO peer ?", {"type": ip_type, "address": address, "port": port, "date_announced": now}) - peer_id = self.cur.cursor.lastrowid - - # Check user's hashes - res = self.execute("SELECT * FROM peer_to_hash WHERE ?", {"peer_id": peer_id}) - hash_ids_db = [row["hash_id"] for row in res] - if hash_ids_db != hashes_ids_announced: - hash_ids_added = set(hashes_ids_announced) - set(hash_ids_db) - hash_ids_removed = set(hash_ids_db) - set(hashes_ids_announced) - if ip_type != "onion" or onion_signed: - for hash_id in hash_ids_added: - self.execute("INSERT INTO peer_to_hash ?", {"peer_id": peer_id, "hash_id": hash_id}) - if hash_ids_removed and delete_missing_hashes: - self.execute("DELETE FROM peer_to_hash WHERE ?", {"peer_id": peer_id, "hash_id": list(hash_ids_removed)}) - - return len(hash_ids_added) + len(hash_ids_removed) - else: - return 0 - - def peerList(self, hash, address=None, onions=[], port=None, limit=30, need_types=["ipv4", "onion"], order=True): - back = {"ipv4": [], "ipv6": [], "onion": []} - if limit == 0: - return back - hashid = self.getHashId(hash) - - if order: - order_sql = "ORDER BY date_announced DESC" - else: - order_sql = "" - where_sql = "hash_id = :hashid" - if onions: - onions_escaped = ["'%s'" % re.sub("[^a-z0-9,]", "", onion) for onion in onions if type(onion) is str] - where_sql += " AND address NOT IN (%s)" % ",".join(onions_escaped) - elif address: - where_sql += " AND NOT (address = :address AND port = :port)" - - query = """ - SELECT type, address, port - FROM peer_to_hash - LEFT JOIN peer USING (peer_id) - WHERE %s - %s - LIMIT :limit - """ % (where_sql, order_sql) - res = self.execute(query, {"hashid": hashid, "address": address, "port": port, "limit": limit}) - - for row in res: - if row["type"] in need_types: - if row["type"] == "onion": - packed = helper.packOnionAddress(row["address"], row["port"]) - else: - packed = helper.packAddress(str(row["address"]), row["port"]) - back[row["type"]].append(packed) - return back diff --git a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py deleted file mode 100644 index ba6d1e23..00000000 --- a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py +++ /dev/null @@ -1,157 +0,0 @@ -import time - -from util import helper - -from Plugin import PluginManager -from BootstrapperDb import BootstrapperDb -from Crypt import CryptRsa -from Config import config - -if "db" not in 
locals().keys(): # Share during reloads - db = BootstrapperDb() - - -@PluginManager.registerTo("FileRequest") -class FileRequestPlugin(object): - def checkOnionSigns(self, onions, onion_signs, onion_sign_this): - if not onion_signs or len(onion_signs) != len(set(onions)): - return False - - if time.time() - float(onion_sign_this) > 3 * 60: - return False # Signed out of allowed 3 minutes - - onions_signed = [] - # Check onion signs - for onion_publickey, onion_sign in onion_signs.items(): - if CryptRsa.verify(onion_sign_this, onion_publickey, onion_sign): - onions_signed.append(CryptRsa.publickeyToOnion(onion_publickey)) - else: - break - - # Check if the same onion addresses signed as the announced onces - if sorted(onions_signed) == sorted(set(onions)): - return True - else: - return False - - def actionAnnounce(self, params): - time_started = time.time() - s = time.time() - # Backward compatibility - if "ip4" in params["add"]: - params["add"].append("ipv4") - if "ip4" in params["need_types"]: - params["need_types"].append("ipv4") - - hashes = params["hashes"] - - all_onions_signed = self.checkOnionSigns(params.get("onions", []), params.get("onion_signs"), params.get("onion_sign_this")) - - time_onion_check = time.time() - s - - ip_type = helper.getIpType(self.connection.ip) - - if ip_type == "onion" or self.connection.ip in config.ip_local: - is_port_open = False - elif ip_type in params["add"]: - is_port_open = True - else: - is_port_open = False - - s = time.time() - # Separatley add onions to sites or at once if no onions present - i = 0 - onion_to_hash = {} - for onion in params.get("onions", []): - if onion not in onion_to_hash: - onion_to_hash[onion] = [] - onion_to_hash[onion].append(hashes[i]) - i += 1 - - hashes_changed = 0 - db.execute("BEGIN") - for onion, onion_hashes in onion_to_hash.iteritems(): - hashes_changed += db.peerAnnounce( - ip_type="onion", - address=onion, - port=params["port"], - hashes=onion_hashes, - onion_signed=all_onions_signed - ) - db.execute("END") - time_db_onion = time.time() - s - - s = time.time() - - if is_port_open: - hashes_changed += db.peerAnnounce( - ip_type=ip_type, - address=self.connection.ip, - port=params["port"], - hashes=hashes, - delete_missing_hashes=params.get("delete") - ) - time_db_ip = time.time() - s - - s = time.time() - # Query sites - back = {} - peers = [] - if params.get("onions") and not all_onions_signed and hashes_changed: - back["onion_sign_this"] = "%.0f" % time.time() # Send back nonce for signing - - if len(hashes) > 500 or not hashes_changed: - limit = 5 - order = False - else: - limit = 30 - order = True - for hash in hashes: - if time.time() - time_started > 1: # 1 sec limit on request - self.connection.log("Announce time limit exceeded after %s/%s sites" % (len(peers), len(hashes))) - break - - hash_peers = db.peerList( - hash, - address=self.connection.ip, onions=onion_to_hash.keys(), port=params["port"], - limit=min(limit, params["need_num"]), need_types=params["need_types"], order=order - ) - if "ip4" in params["need_types"]: # Backward compatibility - hash_peers["ip4"] = hash_peers["ipv4"] - del(hash_peers["ipv4"]) - peers.append(hash_peers) - time_peerlist = time.time() - s - - back["peers"] = peers - self.connection.log( - "Announce %s sites (onions: %s, onion_check: %.3fs, db_onion: %.3fs, db_ip: %.3fs, peerlist: %.3fs, limit: %s)" % - (len(hashes), len(onion_to_hash), time_onion_check, time_db_onion, time_db_ip, time_peerlist, limit) - ) - self.response(back) - - -@PluginManager.registerTo("UiRequest") 
-class UiRequestPlugin(object): - def actionStatsBootstrapper(self): - self.sendHeader() - - # Style - yield """ - - """ - - hash_rows = db.execute("SELECT * FROM hash").fetchall() - for hash_row in hash_rows: - peer_rows = db.execute( - "SELECT * FROM peer LEFT JOIN peer_to_hash USING (peer_id) WHERE hash_id = :hash_id", - {"hash_id": hash_row["hash_id"]} - ).fetchall() - - yield "
    <br>%s (added: %s, peers: %s)<br>
    " % ( - str(hash_row["hash"]).encode("hex"), hash_row["date_added"], len(peer_rows) - ) - for peer_row in peer_rows: - yield " - {ip4: <30} {onion: <30} added: {date_added}, announced: {date_announced}
    ".format(**dict(peer_row)) diff --git a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py deleted file mode 100644 index d99f8ea7..00000000 --- a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py +++ /dev/null @@ -1,224 +0,0 @@ -import hashlib -import os - -import pytest - -from Bootstrapper import BootstrapperPlugin -from Bootstrapper.BootstrapperDb import BootstrapperDb -from Peer import Peer -from Crypt import CryptRsa -from util import helper - - -@pytest.fixture() -def bootstrapper_db(request): - BootstrapperPlugin.db.close() - BootstrapperPlugin.db = BootstrapperDb() - BootstrapperPlugin.db.createTables() # Reset db - BootstrapperPlugin.db.cur.logging = True - - def cleanup(): - BootstrapperPlugin.db.close() - os.unlink(BootstrapperPlugin.db.db_path) - - request.addfinalizer(cleanup) - return BootstrapperPlugin.db - - -@pytest.mark.usefixtures("resetSettings") -class TestBootstrapper: - def testBootstrapperDb(self, file_server, bootstrapper_db): - ip_type = helper.getIpType(file_server.ip) - peer = Peer(file_server.ip, 1544, connection_server=file_server) - hash1 = hashlib.sha256("site1").digest() - hash2 = hashlib.sha256("site2").digest() - hash3 = hashlib.sha256("site3").digest() - - # Verify empty result - res = peer.request("announce", { - "hashes": [hash1, hash2], - "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] - }) - - assert len(res["peers"][0][ip_type]) == 0 # Empty result - - # Verify added peer on previous request - bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1, hash2], delete_missing_hashes=True) - - res = peer.request("announce", { - "hashes": [hash1, hash2], - "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] - }) - assert len(res["peers"][0][ip_type]) == 1 - assert len(res["peers"][1][ip_type]) == 1 - - # hash2 deleted from 1.2.3.4 - bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1], delete_missing_hashes=True) - res = peer.request("announce", { - "hashes": [hash1, hash2], - "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] - }) - assert len(res["peers"][0][ip_type]) == 1 - assert len(res["peers"][1][ip_type]) == 0 - - # Announce 3 hash again - bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1, hash2, hash3], delete_missing_hashes=True) - res = peer.request("announce", { - "hashes": [hash1, hash2, hash3], - "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] - }) - assert len(res["peers"][0][ip_type]) == 1 - assert len(res["peers"][1][ip_type]) == 1 - assert len(res["peers"][2][ip_type]) == 1 - - # Single hash announce - res = peer.request("announce", { - "hashes": [hash1], "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] - }) - assert len(res["peers"][0][ip_type]) == 1 - - # Test DB cleanup - assert map(lambda row: row[0], bootstrapper_db.execute("SELECT address FROM peer").fetchall()) == [file_server.ip_external] # 127.0.0.1 never get added to db - - # Delete peers - bootstrapper_db.execute("DELETE FROM peer WHERE address = ?", [file_server.ip_external]) - assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer_to_hash").fetchone()["num"] == 0 - - assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM hash").fetchone()["num"] == 3 # 3 sites - assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 0 # 0 peer - - def 
testPassive(self, file_server, bootstrapper_db): - peer = Peer(file_server.ip, 1544, connection_server=file_server) - ip_type = helper.getIpType(file_server.ip) - hash1 = hashlib.sha256("hash1").digest() - - bootstrapper_db.peerAnnounce(ip_type, address=None, port=15441, hashes=[hash1]) - res = peer.request("announce", { - "hashes": [hash1], "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [] - }) - - assert len(res["peers"][0]["ipv4"]) == 0 # Empty result - - def testAddOnion(self, file_server, site, bootstrapper_db, tor_manager): - onion1 = tor_manager.addOnion() - onion2 = tor_manager.addOnion() - peer = Peer(file_server.ip, 1544, connection_server=file_server) - hash1 = hashlib.sha256("site1").digest() - hash2 = hashlib.sha256("site2").digest() - hash3 = hashlib.sha256("site3").digest() - - bootstrapper_db.peerAnnounce(ip_type="ipv4", address="1.2.3.4", port=1234, hashes=[hash1, hash2, hash3]) - res = peer.request("announce", { - "onions": [onion1, onion1, onion2], - "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] - }) - assert len(res["peers"][0]["ipv4"]) == 1 - - # Onion address not added yet - site_peers = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) - assert len(site_peers["onion"]) == 0 - assert "onion_sign_this" in res - - # Sign the nonces - sign1 = CryptRsa.sign(res["onion_sign_this"], tor_manager.getPrivatekey(onion1)) - sign2 = CryptRsa.sign(res["onion_sign_this"], tor_manager.getPrivatekey(onion2)) - - # Bad sign (different address) - res = peer.request("announce", { - "onions": [onion1], "onion_sign_this": res["onion_sign_this"], - "onion_signs": {tor_manager.getPublickey(onion2): sign2}, - "hashes": [hash1], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] - }) - assert "onion_sign_this" in res - site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) - assert len(site_peers1["onion"]) == 0 # Not added - - # Bad sign (missing one) - res = peer.request("announce", { - "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], - "onion_signs": {tor_manager.getPublickey(onion1): sign1}, - "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] - }) - assert "onion_sign_this" in res - site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) - assert len(site_peers1["onion"]) == 0 # Not added - - # Good sign - res = peer.request("announce", { - "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], - "onion_signs": {tor_manager.getPublickey(onion1): sign1, tor_manager.getPublickey(onion2): sign2}, - "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] - }) - assert "onion_sign_this" not in res - - # Onion addresses added - site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) - assert len(site_peers1["onion"]) == 1 - site_peers2 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash2) - assert len(site_peers2["onion"]) == 1 - site_peers3 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash3) - assert len(site_peers3["onion"]) == 1 - - assert site_peers1["onion"][0] == site_peers2["onion"][0] - assert site_peers2["onion"][0] != site_peers3["onion"][0] - assert helper.unpackOnionAddress(site_peers1["onion"][0])[0] == onion1 + ".onion" - assert 
helper.unpackOnionAddress(site_peers2["onion"][0])[0] == onion1 + ".onion" - assert helper.unpackOnionAddress(site_peers3["onion"][0])[0] == onion2 + ".onion" - - tor_manager.delOnion(onion1) - tor_manager.delOnion(onion2) - - def testRequestPeers(self, file_server, site, bootstrapper_db, tor_manager): - site.connection_server = file_server - file_server.tor_manager = tor_manager - hash = hashlib.sha256(site.address).digest() - - # Request peers from tracker - assert len(site.peers) == 0 - bootstrapper_db.peerAnnounce(ip_type="ipv4", address="1.2.3.4", port=1234, hashes=[hash]) - site.announcer.announceTracker("zero://%s:%s" % (file_server.ip, file_server.port)) - assert len(site.peers) == 1 - - # Test onion address store - bootstrapper_db.peerAnnounce(ip_type="onion", address="bka4ht2bzxchy44r", port=1234, hashes=[hash], onion_signed=True) - site.announcer.announceTracker("zero://%s:%s" % (file_server.ip, file_server.port)) - assert len(site.peers) == 2 - assert "bka4ht2bzxchy44r.onion:1234" in site.peers - - @pytest.mark.slow - def testAnnounce(self, file_server, tor_manager): - file_server.tor_manager = tor_manager - hash1 = hashlib.sha256("1Nekos4fiBqfcazyG1bAxdBT5oBvA76Z").digest() - hash2 = hashlib.sha256("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr").digest() - peer = Peer("zero.booth.moe", 443, connection_server=file_server) - assert peer.request("ping") - peer = Peer("boot3rdez4rzn36x.onion", 15441, connection_server=file_server) - assert peer.request("ping") - res = peer.request("announce", { - "hashes": [hash1, hash2], - "port": 15441, "need_types": ["ip4", "onion"], "need_num": 100, "add": [""] - }) - - assert res - - def testBackwardCompatibility(self, file_server, bootstrapper_db): - peer = Peer(file_server.ip, 1544, connection_server=file_server) - hash1 = hashlib.sha256("site1").digest() - - bootstrapper_db.peerAnnounce("ipv4", file_server.ip_external, port=15441, hashes=[hash1], delete_missing_hashes=True) - - # Test with ipv4 need type - res = peer.request("announce", { - "hashes": [hash1], - "port": 15441, "need_types": ["ipv4"], "need_num": 10, "add": [] - }) - - assert len(res["peers"][0]["ipv4"]) == 1 - - # Test with ip4 need type - res = peer.request("announce", { - "hashes": [hash1], - "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": [] - }) - - assert len(res["peers"][0]["ip4"]) == 1 diff --git a/plugins/disabled-Bootstrapper/Test/conftest.py b/plugins/disabled-Bootstrapper/Test/conftest.py deleted file mode 100644 index 8c1df5b2..00000000 --- a/plugins/disabled-Bootstrapper/Test/conftest.py +++ /dev/null @@ -1 +0,0 @@ -from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/disabled-Bootstrapper/Test/pytest.ini b/plugins/disabled-Bootstrapper/Test/pytest.ini deleted file mode 100644 index d09210d1..00000000 --- a/plugins/disabled-Bootstrapper/Test/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -python_files = Test*.py -addopts = -rsxX -v --durations=6 -markers = - webtest: mark a test as a webtest. 
\ No newline at end of file diff --git a/plugins/disabled-Bootstrapper/__init__.py b/plugins/disabled-Bootstrapper/__init__.py deleted file mode 100644 index ca533eac..00000000 --- a/plugins/disabled-Bootstrapper/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import BootstrapperPlugin \ No newline at end of file diff --git a/plugins/disabled-Dnschain/SiteManagerPlugin.py b/plugins/disabled-Dnschain/SiteManagerPlugin.py deleted file mode 100644 index a5122ec1..00000000 --- a/plugins/disabled-Dnschain/SiteManagerPlugin.py +++ /dev/null @@ -1,153 +0,0 @@ -import logging, json, os, re, sys, time -import gevent -from Plugin import PluginManager -from Config import config -from util import Http -from Debug import Debug - -allow_reload = False # No reload supported - -log = logging.getLogger("DnschainPlugin") - -@PluginManager.registerTo("SiteManager") -class SiteManagerPlugin(object): - dns_cache_path = "%s/dns_cache.json" % config.data_dir - dns_cache = None - - # Checks if its a valid address - def isAddress(self, address): - if self.isDomain(address): - return True - else: - return super(SiteManagerPlugin, self).isAddress(address) - - - # Return: True if the address is domain - def isDomain(self, address): - return re.match(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) - - - # Load dns entries from data/dns_cache.json - def loadDnsCache(self): - if os.path.isfile(self.dns_cache_path): - self.dns_cache = json.load(open(self.dns_cache_path)) - else: - self.dns_cache = {} - log.debug("Loaded dns cache, entries: %s" % len(self.dns_cache)) - - - # Save dns entries to data/dns_cache.json - def saveDnsCache(self): - json.dump(self.dns_cache, open(self.dns_cache_path, "wb"), indent=2) - - - # Resolve domain using dnschain.net - # Return: The address or None - def resolveDomainDnschainNet(self, domain): - try: - match = self.isDomain(domain) - sub_domain = match.group(1).strip(".") - top_domain = match.group(2) - if not sub_domain: sub_domain = "@" - address = None - with gevent.Timeout(5, Exception("Timeout: 5s")): - res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read() - data = json.loads(res)["data"]["value"] - if "zeronet" in data: - for key, val in data["zeronet"].iteritems(): - self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours - self.saveDnsCache() - return data["zeronet"].get(sub_domain) - # Not found - return address - except Exception as err: - log.debug("Dnschain.net %s resolve error: %s" % (domain, Debug.formatException(err))) - - - # Resolve domain using dnschain.info - # Return: The address or None - def resolveDomainDnschainInfo(self, domain): - try: - match = self.isDomain(domain) - sub_domain = match.group(1).strip(".") - top_domain = match.group(2) - if not sub_domain: sub_domain = "@" - address = None - with gevent.Timeout(5, Exception("Timeout: 5s")): - res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read() - data = json.loads(res)["value"] - for key, val in data["zeronet"].iteritems(): - self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours - self.saveDnsCache() - return data["zeronet"].get(sub_domain) - # Not found - return address - except Exception as err: - log.debug("Dnschain.info %s resolve error: %s" % (domain, Debug.formatException(err))) - - - # Resolve domain - # Return: The address or None - def resolveDomain(self, domain): - domain = domain.lower() - if self.dns_cache == None: - self.loadDnsCache() - if domain.count(".") < 2: # Its a topleved 
request, prepend @. to it - domain = "@."+domain - - domain_details = self.dns_cache.get(domain) - if domain_details and time.time() < domain_details[1]: # Found in cache and its not expired - return domain_details[0] - else: - # Resovle dns using dnschain - thread_dnschain_info = gevent.spawn(self.resolveDomainDnschainInfo, domain) - thread_dnschain_net = gevent.spawn(self.resolveDomainDnschainNet, domain) - gevent.joinall([thread_dnschain_net, thread_dnschain_info]) # Wait for finish - - if thread_dnschain_info.value and thread_dnschain_net.value: # Booth successfull - if thread_dnschain_info.value == thread_dnschain_net.value: # Same returned value - return thread_dnschain_info.value - else: - log.error("Dns %s missmatch: %s != %s" % (domain, thread_dnschain_info.value, thread_dnschain_net.value)) - - # Problem during resolve - if domain_details: # Resolve failed, but we have it in the cache - domain_details[1] = time.time()+60*60 # Dont try again for 1 hour - return domain_details[0] - else: # Not found in cache - self.dns_cache[domain] = [None, time.time()+60] # Don't check again for 1 min - return None - - - # Return or create site and start download site files - # Return: Site or None if dns resolve failed - def need(self, address, all_file=True): - if self.isDomain(address): # Its looks like a domain - address_resolved = self.resolveDomain(address) - if address_resolved: - address = address_resolved - else: - return None - - return super(SiteManagerPlugin, self).need(address, all_file) - - - # Return: Site object or None if not found - def get(self, address): - if self.sites == None: # Not loaded yet - self.load() - if self.isDomain(address): # Its looks like a domain - address_resolved = self.resolveDomain(address) - if address_resolved: # Domain found - site = self.sites.get(address_resolved) - if site: - site_domain = site.settings.get("domain") - if site_domain != address: - site.settings["domain"] = address - else: # Domain not found - site = self.sites.get(address) - - else: # Access by site address - site = self.sites.get(address) - return site - diff --git a/plugins/disabled-Dnschain/UiRequestPlugin.py b/plugins/disabled-Dnschain/UiRequestPlugin.py deleted file mode 100644 index 8ab9d5c5..00000000 --- a/plugins/disabled-Dnschain/UiRequestPlugin.py +++ /dev/null @@ -1,34 +0,0 @@ -import re -from Plugin import PluginManager - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - def __init__(self, server = None): - from Site import SiteManager - self.site_manager = SiteManager.site_manager - super(UiRequestPlugin, self).__init__(server) - - - # Media request - def actionSiteMedia(self, path): - match = re.match(r"/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", path) - if match: # Its a valid domain, resolve first - domain = match.group("address") - address = self.site_manager.resolveDomain(domain) - if address: - path = "/media/"+address+match.group("inner_path") - return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output - - - # Is mediarequest allowed from that referer - def isMediaRequestAllowed(self, site_address, referer): - referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address - referer_site_address = re.match(r"/(?P<address>
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") - - if referer_site_address == site_address: # Referer site address as simple address - return True - elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns - return True - else: # Invalid referer - return False - diff --git a/plugins/disabled-Dnschain/__init__.py b/plugins/disabled-Dnschain/__init__.py deleted file mode 100644 index 2b36af5d..00000000 --- a/plugins/disabled-Dnschain/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This plugin is experimental, if you really want to enable uncomment the following lines: -# import DnschainPlugin -# import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/disabled-DonationMessage/DonationMessagePlugin.py b/plugins/disabled-DonationMessage/DonationMessagePlugin.py deleted file mode 100644 index 8cf0d541..00000000 --- a/plugins/disabled-DonationMessage/DonationMessagePlugin.py +++ /dev/null @@ -1,22 +0,0 @@ -import re -from Plugin import PluginManager - -# Warning: If you modify the donation address then renmae the plugin's directory to "MyDonationMessage" to prevent the update script overwrite - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - # Inject a donation message to every page top right corner - def renderWrapper(self, *args, **kwargs): - body = super(UiRequestPlugin, self).renderWrapper(*args, **kwargs) # Get the wrapper frame output - - inject_html = """ - - Please donate to help to keep this ZeroProxy alive - - - """ - - return re.sub(r"\s*\s*$", inject_html, body) diff --git a/plugins/disabled-DonationMessage/__init__.py b/plugins/disabled-DonationMessage/__init__.py deleted file mode 100644 index f8dcae2f..00000000 --- a/plugins/disabled-DonationMessage/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import DonationMessagePlugin diff --git a/plugins/disabled-Multiuser/MultiuserPlugin.py b/plugins/disabled-Multiuser/MultiuserPlugin.py deleted file mode 100644 index e3e4b54c..00000000 --- a/plugins/disabled-Multiuser/MultiuserPlugin.py +++ /dev/null @@ -1,227 +0,0 @@ -import re -import sys -import json - -from Config import config -from Plugin import PluginManager -from Crypt import CryptBitcoin -import UserPlugin - -try: - local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys()) # Users in users.json -except Exception, err: - local_master_addresses = set() - - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - def __init__(self, *args, **kwargs): - self.user_manager = sys.modules["User.UserManager"].user_manager - super(UiRequestPlugin, self).__init__(*args, **kwargs) - - # Create new user and inject user welcome message if necessary - # Return: Html body also containing the injection - def actionWrapper(self, path, extra_headers=None): - - match = re.match("/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) - if not match: - return False - - inner_path = match.group("inner_path").lstrip("/") - html_request = "." not in inner_path or inner_path.endswith(".html") # Only inject html to html requests - - user_created = False - if html_request: - user = self.getCurrentUser() # Get user from cookie - if not user: # No user found by cookie - user = self.user_manager.create() - user_created = True - else: - user = None - - # Disable new site creation if --multiuser_no_new_sites enabled - if config.multiuser_no_new_sites: - path_parts = self.parsePath(path) - if not self.server.site_manager.get(match.group("address")) and (not user or user.master_address not in local_master_addresses): - self.sendHeader(404) - return self.formatError("Not Found", "Adding new sites disabled on this proxy", details=False) - - if user_created: - if not extra_headers: - extra_headers = {} - extra_headers['Set-Cookie'] = "master_address=%s;path=/;max-age=2592000;" % user.master_address # = 30 days - - loggedin = self.get.get("login") == "done" - - back_generator = super(UiRequestPlugin, self).actionWrapper(path, extra_headers) # Get the wrapper frame output - - if not back_generator: # Wrapper error or not string returned, injection not possible - return False - - elif loggedin: - back = back_generator.next() - inject_html = """ - - - - - """.replace("\t", "") - if user.master_address in local_master_addresses: - message = "Hello master!" - else: - message = "Hello again!" - inject_html = inject_html.replace("{message}", message) - inject_html = inject_html.replace("{script_nonce}", self.getScriptNonce()) - return iter([re.sub("\s*\s*$", inject_html, back)]) # Replace the tags with the injection - - else: # No injection necessary - return back_generator - - # Get the current user based on request's cookies - # Return: User object or None if no match - def getCurrentUser(self): - cookies = self.getCookies() - user = None - if "master_address" in cookies: - users = self.user_manager.list() - user = users.get(cookies["master_address"]) - return user - - -@PluginManager.registerTo("UiWebsocket") -class UiWebsocketPlugin(object): - def __init__(self, *args, **kwargs): - self.multiuser_denied_cmds = ( - "sitePause", "siteResume", "siteDelete", "configSet", "serverShutdown", "serverUpdate", "siteClone", - "siteSetOwned", "siteSetAutodownloadoptional", "dbReload", "dbRebuild", - "mergerSiteDelete", "siteSetLimit", "siteSetAutodownloadBigfileLimit", - "optionalLimitSet", "optionalHelp", "optionalHelpRemove", "optionalHelpAll", "optionalFilePin", "optionalFileUnpin", "optionalFileDelete", - "muteAdd", "muteRemove", "siteblockAdd", "siteblockRemove", "filterIncludeAdd", "filterIncludeRemove" - ) - if config.multiuser_no_new_sites: - self.multiuser_denied_cmds += ("mergerSiteAdd", ) - - super(UiWebsocketPlugin, self).__init__(*args, **kwargs) - - # Let the page know we running in multiuser mode - def formatServerInfo(self): - server_info = super(UiWebsocketPlugin, self).formatServerInfo() - server_info["multiuser"] = True - if "ADMIN" in self.site.settings["permissions"]: - server_info["master_address"] = self.user.master_address - return server_info - - # Show current user's master seed - def actionUserShowMasterSeed(self, to): - if "ADMIN" not in self.site.settings["permissions"]: - return self.response(to, "Show master seed not allowed") - message = "Your unique private key:" - message += "
    %s
    " % self.user.master_seed - message += "(Save it, you can access your account using this information)" - self.cmd("notification", ["info", message]) - - # Logout user - def actionUserLogout(self, to): - if "ADMIN" not in self.site.settings["permissions"]: - return self.response(to, "Logout not allowed") - message = "You have been logged out. Login to another account" - self.cmd("notification", ["done", message, 1000000]) # 1000000 = Show ~forever :) - - script = "document.cookie = 'master_address=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/';" - script += "$('#button_notification').on('click', function() { zeroframe.cmd(\"userLoginForm\", []); });" - self.cmd("injectScript", script) - # Delete from user_manager - user_manager = sys.modules["User.UserManager"].user_manager - if self.user.master_address in user_manager.users: - if not config.multiuser_local: - del user_manager.users[self.user.master_address] - self.response(to, "Successful logout") - else: - self.response(to, "User not found") - - # Show login form - def actionUserLoginForm(self, to): - self.cmd("prompt", ["Login
    Your private key:", "password", "Login"], self.responseUserLogin) - - # Login form submit - def responseUserLogin(self, master_seed): - user_manager = sys.modules["User.UserManager"].user_manager - user = user_manager.get(CryptBitcoin.privatekeyToAddress(master_seed)) - if not user: - user = user_manager.create(master_seed=master_seed) - if user.master_address: - script = "document.cookie = 'master_address=%s;path=/;max-age=2592000;';" % user.master_address - script += "zeroframe.cmd('wrapperReload', ['login=done']);" - self.cmd("notification", ["done", "Successful login, reloading page..."]) - self.cmd("injectScript", script) - else: - self.cmd("notification", ["error", "Error: Invalid master seed"]) - self.actionUserLoginForm(0) - - def hasCmdPermission(self, cmd): - cmd = cmd[0].lower() + cmd[1:] - if not config.multiuser_local and self.user.master_address not in local_master_addresses and cmd in self.multiuser_denied_cmds: - self.cmd("notification", ["info", "This function is disabled on this proxy!"]) - return False - else: - return super(UiWebsocketPlugin, self).hasCmdPermission(cmd) - - def actionCertAdd(self, *args, **kwargs): - super(UiWebsocketPlugin, self).actionCertAdd(*args, **kwargs) - master_seed = self.user.master_seed - message = """ - - Hello, welcome to ZeroProxy!
    A new, unique account created for you:
    - - -
    - This is your private key, save it, so you can login next time.
    - Warning: Without this key, your account will be lost forever! -

    - Ok, Saved it!

    - This site allows you to browse ZeroNet content, but if you want to secure your account
    - and help to keep the network alive, then please run your own ZeroNet client.
    - """ - - self.cmd("notification", ["info", message]) - - script = """ - $("#button_notification_masterseed").on("click", function() { - this.value = "{master_seed}"; this.setSelectionRange(0,100); - }) - $("#button_notification_download").on("mousedown", function() { - this.href = window.URL.createObjectURL(new Blob(["ZeroNet user master seed:\\r\\n{master_seed}"])) - }) - """.replace("{master_seed}", master_seed) - self.cmd("injectScript", script) - - - def actionPermissionAdd(self, to, permission): - if permission == "NOSANDBOX": - self.cmd("notification", ["info", "You can't disable sandbox on this proxy!"]) - self.response(to, {"error": "Denied by proxy"}) - return False - else: - return super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission) - - -@PluginManager.registerTo("ConfigPlugin") -class ConfigPlugin(object): - def createArguments(self): - group = self.parser.add_argument_group("Multiuser plugin") - group.add_argument('--multiuser_local', help="Enable unsafe Ui functions and write users to disk", action='store_true') - group.add_argument('--multiuser_no_new_sites', help="Denies adding new sites by normal users", action='store_true') - - return super(ConfigPlugin, self).createArguments() diff --git a/plugins/disabled-Multiuser/UserPlugin.py b/plugins/disabled-Multiuser/UserPlugin.py deleted file mode 100644 index 3c9ebae8..00000000 --- a/plugins/disabled-Multiuser/UserPlugin.py +++ /dev/null @@ -1,35 +0,0 @@ -from Config import config -from Plugin import PluginManager - -allow_reload = False - -@PluginManager.registerTo("UserManager") -class UserManagerPlugin(object): - def load(self): - if not config.multiuser_local: - # In multiuser mode do not load the users - if not self.users: - self.users = {} - return self.users - else: - return super(UserManagerPlugin, self).load() - - # Find user by master address - # Return: User or None - def get(self, master_address=None): - users = self.list() - if master_address in users: - user = users[master_address] - else: - user = None - return user - - -@PluginManager.registerTo("User") -class UserPlugin(object): - # In multiuser mode users data only exits in memory, dont write to data/user.json - def save(self): - if not config.multiuser_local: - return False - else: - return super(UserPlugin, self).save() diff --git a/plugins/disabled-Multiuser/__init__.py b/plugins/disabled-Multiuser/__init__.py deleted file mode 100644 index 154d6008..00000000 --- a/plugins/disabled-Multiuser/__init__.py +++ /dev/null @@ -1 +0,0 @@ -import MultiuserPlugin diff --git a/plugins/disabled-StemPort/StemPortPlugin.py b/plugins/disabled-StemPort/StemPortPlugin.py deleted file mode 100644 index 3a3787c7..00000000 --- a/plugins/disabled-StemPort/StemPortPlugin.py +++ /dev/null @@ -1,135 +0,0 @@ -import logging -import traceback - -import socket -import stem -from stem import Signal -from stem.control import Controller -from stem.socket import ControlPort - -from Plugin import PluginManager -from Config import config -from Debug import Debug - -if config.tor != "disable": - from gevent import monkey - monkey.patch_time() - monkey.patch_socket(dns=False) - monkey.patch_thread() - print "Stem Port Plugin: modules are patched." -else: - print "Stem Port Plugin: Tor mode disabled. Module patching skipped." 
- - -class PatchedControlPort(ControlPort): - def _make_socket(self): - try: - if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one - control_socket = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) - else: - control_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - - # TODO: repeated code - consider making a separate method - - control_socket.connect((self._control_addr, self._control_port)) - return control_socket - except socket.error as exc: - raise stem.SocketError(exc) - -def from_port(address = '127.0.0.1', port = 'default'): - import stem.connection - - if not stem.util.connection.is_valid_ipv4_address(address): - raise ValueError('Invalid IP address: %s' % address) - elif port != 'default' and not stem.util.connection.is_valid_port(port): - raise ValueError('Invalid port: %s' % port) - - if port == 'default': - raise ValueError('Must specify a port') - else: - control_port = PatchedControlPort(address, port) - - return Controller(control_port) - - -@PluginManager.registerTo("TorManager") -class TorManagerPlugin(object): - - def connectController(self): - self.log.info("Authenticate using Stem... %s:%s" % (self.ip, self.port)) - - try: - with self.lock: - if config.tor_password: - controller = from_port(port=self.port, password=config.tor_password) - else: - controller = from_port(port=self.port) - controller.authenticate() - self.controller = controller - self.status = u"Connected (via Stem)" - except Exception, err: - print("\n") - traceback.print_exc() - print("\n") - - self.controller = None - self.status = u"Error (%s)" % err - self.log.error("Tor stem connect error: %s" % Debug.formatException(err)) - - return self.controller - - - def disconnect(self): - self.controller.close() - self.controller = None - - - def resetCircuits(self): - try: - self.controller.signal(Signal.NEWNYM) - except Exception, err: - self.status = u"Stem reset circuits error (%s)" % err - self.log.error("Stem reset circuits error: %s" % err) - - - def makeOnionAndKey(self): - try: - service = self.controller.create_ephemeral_hidden_service( - {self.fileserver_port: self.fileserver_port}, - await_publication = False - ) - if service.private_key_type != "RSA1024": - raise Exception("ZeroNet doesn't support crypto " + service.private_key_type) - - self.log.debug("Stem created %s.onion (async descriptor publication)" % service.service_id) - - return (service.service_id, service.private_key) - - except Exception, err: - self.status = u"AddOnion error (Stem: %s)" % err - self.log.error("Failed to create hidden service with Stem: " + err) - return False - - - def delOnion(self, address): - try: - self.controller.remove_ephemeral_hidden_service(address) - return True - except Exception, err: - self.status = u"DelOnion error (Stem: %s)" % err - self.log.error("Stem failed to delete %s.onion: %s" % (address, err)) - self.disconnect() # Why? 
- return False - - - def request(self, cmd): - with self.lock: - if not self.enabled: - return False - else: - self.log.error("[WARNING] StemPort self.request should not be called") - return "" - - def send(self, cmd, conn=None): - self.log.error("[WARNING] StemPort self.send should not be called") - return "" diff --git a/plugins/disabled-StemPort/__init__.py b/plugins/disabled-StemPort/__init__.py deleted file mode 100644 index 71150ad6..00000000 --- a/plugins/disabled-StemPort/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -try: - from stem.control import Controller - stem_found = True -except Exception as err: - print "STEM NOT FOUND! %s" % err - stem_found = False - -if stem_found: - print "Starting Stem plugin..." - import StemPortPlugin diff --git a/plugins/disabled-UiPassword/UiPasswordPlugin.py b/plugins/disabled-UiPassword/UiPasswordPlugin.py deleted file mode 100644 index 6e746fd4..00000000 --- a/plugins/disabled-UiPassword/UiPasswordPlugin.py +++ /dev/null @@ -1,134 +0,0 @@ -import string -import random -import time -import json -import re - -from Config import config -from Plugin import PluginManager - -if "sessions" not in locals().keys(): # To keep sessions between module reloads - sessions = {} - - -def showPasswordAdvice(password): - error_msgs = [] - if not password or not isinstance(password, (str, unicode)): - error_msgs.append("You have enabled UiPassword plugin, but you forgot to set a password!") - elif len(password) < 8: - error_msgs.append("You are using a very short UI password!") - return error_msgs - -@PluginManager.registerTo("UiRequest") -class UiRequestPlugin(object): - sessions = sessions - last_cleanup = time.time() - - def route(self, path): - # Restict Ui access by ip - if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: - return self.error403(details=False) - if path.endswith("favicon.ico"): - return self.actionFile("src/Ui/media/img/favicon.ico") - else: - if config.ui_password: - if time.time() - self.last_cleanup > 60 * 60: # Cleanup expired sessions every hour - self.cleanup() - # Validate session - session_id = self.getCookies().get("session_id") - if session_id not in self.sessions: # Invalid session id, display login - return self.actionLogin() - return super(UiRequestPlugin, self).route(path) - - # Action: Login - def actionLogin(self): - template = open("plugins/UiPassword/login.html").read() - self.sendHeader() - posted = self.getPosted() - if posted: # Validate http posted data - if self.checkPassword(posted.get("password")): - # Valid password, create session - session_id = self.randomString(26) - self.sessions[session_id] = { - "added": time.time(), - "keep": posted.get("keep") - } - - # Redirect to homepage or referer - url = self.env.get("HTTP_REFERER", "") - if not url or re.sub(r"\?.*", "", url).endswith("/Login"): - url = "/" + config.homepage - cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id) # Max age = 30 days - self.start_response('301 Redirect', [('Location', url), cookie_header]) - yield "Redirecting..." 
- - else: - # Invalid password, show login form again - template = template.replace("{result}", "bad_password") - yield template - - def checkPassword(self, password): - return password == config.ui_password - - def randomString(self, nchars): - return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(nchars)) - - @classmethod - def cleanup(cls): - cls.last_cleanup = time.time() - for session_id, session in cls.sessions.items(): - if session["keep"] and time.time() - session["added"] > 60 * 60 * 24 * 60: # Max 60days for keep sessions - del(cls.sessions[session_id]) - elif not session["keep"] and time.time() - session["added"] > 60 * 60 * 24: # Max 24h for non-keep sessions - del(cls.sessions[session_id]) - - # Action: Display sessions - def actionSessions(self): - self.sendHeader() - yield "<pre>"
    -        yield json.dumps(self.sessions, indent=4)
    -
    -    # Action: Logout
    -    def actionLogout(self):
    -        # Session id has to passed as get parameter or called without referer to avoid remote logout
    -        session_id = self.getCookies().get("session_id")
    -        if not self.env.get("HTTP_REFERER") or session_id == self.get.get("session_id"):
    -            if session_id in self.sessions:
    -                del self.sessions[session_id]
    -            self.start_response('301 Redirect', [
    -                ('Location', "/"),
    -                ('Set-Cookie', "session_id=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT")
    -            ])
    -            yield "Redirecting..."
    -        else:
    -            self.sendHeader()
    -            yield "Error: Invalid session id"
    -
    -
    -
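To make the login flow above concrete, here is a hedged client-side sketch. The UI address and the /Login path are assumptions (the redirect logic above only implies a URL ending in /Login; 43110 is ZeroNet's customary UI port), so treat this as illustrative rather than a documented API:

```python
# Hypothetical client-side view of the login flow handled by actionLogin() above.
# Assumptions: the UI listens on 127.0.0.1:43110 and the login form posts to /Login.
import requests

ui = "http://127.0.0.1:43110"
session = requests.Session()

# Any request without a valid session_id cookie gets the login page instead of content.
session.post(ui + "/Login", data={"password": "my-ui-password", "keep": "1"})

# On success the server responds with a redirect and a session_id cookie
# (max-age 2592000 seconds = 30 days); later requests reuse it automatically.
print(session.cookies.get("session_id"))
```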
    -@PluginManager.registerTo("ConfigPlugin")
    -class ConfigPlugin(object):
    -    def createArguments(self):
    -        group = self.parser.add_argument_group("UiPassword plugin")
    -        group.add_argument('--ui_password', help='Password to access UiServer', default=None, metavar="password")
    -
    -        return super(ConfigPlugin, self).createArguments()
    -
    -
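The ConfigPlugin hook above is how the option reaches ZeroNet's shared argument parser and ends up as `config.ui_password`. A rough plain-argparse equivalent, for orientation only (the real parser is built centrally in src/Config.py):

```python
# Rough argparse sketch of what the ConfigPlugin hook above contributes (illustrative only).
import argparse

parser = argparse.ArgumentParser()
group = parser.add_argument_group("UiPassword plugin")
group.add_argument('--ui_password', help='Password to access UiServer', default=None, metavar="password")

config = parser.parse_args(["--ui_password", "hunter2"])
print(config.ui_password)  # checkPassword() above compares the posted form value against this
```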
    -from Translate import translate as lang
    -@PluginManager.registerTo("UiWebsocket")
    -class UiWebsocketPlugin(object):
    -    def actionUiLogout(self, to):
    -        permissions = self.getPermissions(to)
    -        if "ADMIN" not in permissions:
    -            return self.response(to, "You don't have permission to run this command")
    -
    -        session_id = self.request.getCookies().get("session_id", "")
    -        self.cmd("redirect", '/Logout?session_id=%s' % session_id)
    -
    -    def addHomepageNotifications(self):
    -        error_msgs = showPasswordAdvice(config.ui_password)
    -        for msg in error_msgs:
    -            self.site.notifications.append(["error", lang[msg]])
    -
    -        return super(UiWebsocketPlugin, self).addHomepageNotifications()
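A quick usage sketch of the showPasswordAdvice() helper defined near the top of this file; the expected results follow directly from its two checks. It assumes the helper is in scope (e.g. inside this plugin module):

```python
# Usage sketch; the return values below follow from showPasswordAdvice()'s checks.
print(showPasswordAdvice(None))      # -> ["You have enabled UiPassword plugin, but you forgot to set a password!"]
print(showPasswordAdvice("short"))   # -> ["You are using a very short UI password!"]
print(showPasswordAdvice("a sufficiently long passphrase"))  # -> []
```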
    diff --git a/plugins/disabled-UiPassword/__init__.py b/plugins/disabled-UiPassword/__init__.py
    deleted file mode 100644
    index 37350c3d..00000000
    --- a/plugins/disabled-UiPassword/__init__.py
    +++ /dev/null
    @@ -1 +0,0 @@
    -import UiPasswordPlugin
    \ No newline at end of file
    diff --git a/plugins/disabled-UiPassword/login.html b/plugins/disabled-UiPassword/login.html
    deleted file mode 100644
    index 12d0889d..00000000
    --- a/plugins/disabled-UiPassword/login.html
    +++ /dev/null
    @@ -1,116 +0,0 @@
- Log In
    diff --git a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
    deleted file mode 100644
    index e8fc8610..00000000
    --- a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
    +++ /dev/null
    @@ -1,68 +0,0 @@
    -import logging, json, os, re, sys, time
    -import gevent
    -from Plugin import PluginManager
    -from Config import config
    -from Debug import Debug
    -from domainLookup import lookupDomain
    -
    -allow_reload = False # No reload supported
    -
    -log = logging.getLogger("Zeroname-localPlugin")
    -
    -
    -@PluginManager.registerTo("SiteManager")
    -class SiteManagerPlugin(object):
    -    def load(self):
    -        super(SiteManagerPlugin, self).load()
    -
    -    # Checks if its a valid address
    -    def isAddress(self, address):
    -        if self.isDomain(address): 
    -            return True
    -        else:
    -            return super(SiteManagerPlugin, self).isAddress(address)
    -
    -
    -    # Return: True if the address is domain
    -    def isDomain(self, address):
    -        return re.match(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
    -
    -
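Since the isDomain() pattern above is the only test deciding whether an address is treated as a domain rather than a raw site address, a quick sanity check with illustrative inputs may help:

```python
# Quick sanity check of the isDomain() pattern above (inputs are illustrative).
import re

pattern = r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$"
print(bool(re.match(pattern, "example.bit")))       # True  -> treated as a domain
print(bool(re.match(pattern, "blog.example.bit")))  # True  -> subdomains match too
print(bool(re.match(pattern, "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D")))  # False -> plain site address
```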
    -    # Resolve domain
    -    # Return: The address or None
    -    def resolveDomain(self, domain):
    -        return lookupDomain(domain)
    -
    -
    -    # Return or create site and start download site files
    -    # Return: Site or None if dns resolve failed
    -    def need(self, address, all_file=True):
    -        if self.isDomain(address): # Its looks like a domain
    -            address_resolved = self.resolveDomain(address)
    -            if address_resolved:
    -                address = address_resolved
    -            else:
    -                return None
    -        
    -        return super(SiteManagerPlugin, self).need(address, all_file)
    -
    -
    -    # Return: Site object or None if not found
    -    def get(self, address):
    -        if self.sites == None: # Not loaded yet
    -            self.load()
    -        if self.isDomain(address): # Its looks like a domain
    -            address_resolved = self.resolveDomain(address)
    -            if address_resolved: # Domain found
    -                site = self.sites.get(address_resolved)
    -                if site:
    -                    site_domain = site.settings.get("domain")
    -                    if site_domain != address:
    -                        site.settings["domain"] = address
    -            else: # Domain not found
    -                site = self.sites.get(address)
    -
    -        else: # Access by site address
    -            site = self.sites.get(address)
    -        return site
    -
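For context on what resolveDomain() returns: lookupDomain() (defined in domainLookup.py later in this diff) reads the Namecoin d/<name> value and picks value["zeronet"][subdomain]. A hypothetical record shaped the way that lookup expects, with placeholder addresses:

```python
# Hypothetical Namecoin value for "d/example"; lookupDomain() below does
# json.loads(value) and then value["zeronet"][subdomain], with "" for the bare domain.
example_value = {
    "zeronet": {
        "": "1ExampleBareDomainAddressxxxxxxxxx",     # example.bit
        "blog": "1ExampleBlogSubdomainAddressxxxxx",  # blog.example.bit
    }
}
print(example_value["zeronet"][""])      # what lookupDomain("example.bit") would return
print(example_value["zeronet"]["blog"])  # what lookupDomain("blog.example.bit") would return
```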
    diff --git a/plugins/disabled-Zeroname-local/UiRequestPlugin.py b/plugins/disabled-Zeroname-local/UiRequestPlugin.py
    deleted file mode 100644
    index df37e066..00000000
    --- a/plugins/disabled-Zeroname-local/UiRequestPlugin.py
    +++ /dev/null
    @@ -1,40 +0,0 @@
    -import re
    -from Plugin import PluginManager
    -
    -@PluginManager.registerTo("UiRequest")
    -class UiRequestPlugin(object):
    -    def __init__(self, *args, **kwargs):
    -        from Site import SiteManager
    -        self.site_manager = SiteManager.site_manager
    -        super(UiRequestPlugin, self).__init__(*args, **kwargs)
    -
    -
    -    # Media request
    -    def actionSiteMedia(self, path):
-        match = re.match(r"/media/(?P<address>[A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", path) - if match: # Its a valid domain, resolve first - domain = match.group("address") - address = self.site_manager.resolveDomain(domain) - if address: - path = "/media/"+address+match.group("inner_path") - return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output - - - # Is mediarequest allowed from that referer - def isMediaRequestAllowed(self, site_address, referer): - referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address - referer_path = re.sub(r"\?.*", "", referer_path) # Remove http params - - if self.isProxyRequest(): # Match to site domain - referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access - referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1) - else: # Match to request path
- referer_site_address = re.match(r"/(?P<address>[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", referer_path).group("address") - - if referer_site_address == site_address: # Referer site address as simple address - return True - elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns - return True - else: # Invalid referer - return False - diff --git a/plugins/disabled-Zeroname-local/__init__.py b/plugins/disabled-Zeroname-local/__init__.py deleted file mode 100644 index 889802db..00000000 --- a/plugins/disabled-Zeroname-local/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -import UiRequestPlugin -import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/disabled-Zeroname-local/bitcoinrpc/__init__.py b/plugins/disabled-Zeroname-local/bitcoinrpc/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py b/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py deleted file mode 100644 index 52cdb107..00000000 --- a/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py +++ /dev/null @@ -1,190 +0,0 @@ - -""" - Copyright 2011 Jeff Garzik - - AuthServiceProxy has the following improvements over python-jsonrpc's - ServiceProxy class: - - - HTTP connections persist for the life of the AuthServiceProxy object - (if server supports HTTP/1.1) - - sends protocol 'version', per JSON-RPC 1.1 - - sends proper, incrementing 'id' - - sends Basic HTTP authentication headers - - parses all JSON numbers that look like floats as Decimal - - uses standard Python json lib - - Previous copyright, from python-jsonrpc/jsonrpc/proxy.py: - - Copyright (c) 2007 Jan-Klaas Kollhof - - This file is part of jsonrpc. - - jsonrpc is free software; you can redistribute it and/or modify - it under the terms of the GNU Lesser General Public License as published by - the Free Software Foundation; either version 2.1 of the License, or - (at your option) any later version. - - This software is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Lesser General Public License for more details.
- - You should have received a copy of the GNU Lesser General Public License - along with this software; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -""" - -try: - import http.client as httplib -except ImportError: - import httplib -import base64 -import decimal -import json -import logging -try: - import urllib.parse as urlparse -except ImportError: - import urlparse - -USER_AGENT = "AuthServiceProxy/0.1" - -HTTP_TIMEOUT = 30 - -log = logging.getLogger("BitcoinRPC") - -class JSONRPCException(Exception): - def __init__(self, rpc_error): - parent_args = [] - try: - parent_args.append(rpc_error['message']) - except: - pass - Exception.__init__(self, *parent_args) - self.error = rpc_error - self.code = rpc_error['code'] if 'code' in rpc_error else None - self.message = rpc_error['message'] if 'message' in rpc_error else None - - def __str__(self): - return '%d: %s' % (self.code, self.message) - - def __repr__(self): - return '<%s \'%s\'>' % (self.__class__.__name__, self) - - -def EncodeDecimal(o): - if isinstance(o, decimal.Decimal): - return float(round(o, 8)) - raise TypeError(repr(o) + " is not JSON serializable") - -class AuthServiceProxy(object): - __id_count = 0 - - def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None): - self.__service_url = service_url - self.__service_name = service_name - self.__url = urlparse.urlparse(service_url) - if self.__url.port is None: - port = 80 - else: - port = self.__url.port - (user, passwd) = (self.__url.username, self.__url.password) - try: - user = user.encode('utf8') - except AttributeError: - pass - try: - passwd = passwd.encode('utf8') - except AttributeError: - pass - authpair = user + b':' + passwd - self.__auth_header = b'Basic ' + base64.b64encode(authpair) - - self.__timeout = timeout - - if connection: - # Callables re-use the connection of the original proxy - self.__conn = connection - elif self.__url.scheme == 'https': - self.__conn = httplib.HTTPSConnection(self.__url.hostname, port, - timeout=timeout) - else: - self.__conn = httplib.HTTPConnection(self.__url.hostname, port, - timeout=timeout) - - def __getattr__(self, name): - if name.startswith('__') and name.endswith('__'): - # Python internal stuff - raise AttributeError - if self.__service_name is not None: - name = "%s.%s" % (self.__service_name, name) - return AuthServiceProxy(self.__service_url, name, self.__timeout, self.__conn) - - def __call__(self, *args): - AuthServiceProxy.__id_count += 1 - - log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self.__service_name, - json.dumps(args, default=EncodeDecimal))) - postdata = json.dumps({'version': '1.1', - 'method': self.__service_name, - 'params': args, - 'id': AuthServiceProxy.__id_count}, default=EncodeDecimal) - self.__conn.request('POST', self.__url.path, postdata, - {'Host': self.__url.hostname, - 'User-Agent': USER_AGENT, - 'Authorization': self.__auth_header, - 'Content-type': 'application/json'}) - self.__conn.sock.settimeout(self.__timeout) - - response = self._get_response() - if response.get('error') is not None: - raise JSONRPCException(response['error']) - elif 'result' not in response: - raise JSONRPCException({ - 'code': -343, 'message': 'missing JSON-RPC result'}) - - return response['result'] - - def batch_(self, rpc_calls): - """Batch RPC call. - Pass array of arrays: [ [ "method", params... ], ... ] - Returns array of results. 
- """ - batch_data = [] - for rpc_call in rpc_calls: - AuthServiceProxy.__id_count += 1 - m = rpc_call.pop(0) - batch_data.append({"jsonrpc":"2.0", "method":m, "params":rpc_call, "id":AuthServiceProxy.__id_count}) - - postdata = json.dumps(batch_data, default=EncodeDecimal) - log.debug("--> "+postdata) - self.__conn.request('POST', self.__url.path, postdata, - {'Host': self.__url.hostname, - 'User-Agent': USER_AGENT, - 'Authorization': self.__auth_header, - 'Content-type': 'application/json'}) - results = [] - responses = self._get_response() - for response in responses: - if response['error'] is not None: - raise JSONRPCException(response['error']) - elif 'result' not in response: - raise JSONRPCException({ - 'code': -343, 'message': 'missing JSON-RPC result'}) - else: - results.append(response['result']) - return results - - def _get_response(self): - http_response = self.__conn.getresponse() - if http_response is None: - raise JSONRPCException({ - 'code': -342, 'message': 'missing HTTP response from server'}) - - responsedata = http_response.read().decode('utf8') - response = json.loads(responsedata, parse_float=decimal.Decimal) - if "error" in response and response["error"] is None: - log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal))) - else: - log.debug("<-- "+responsedata) - return response diff --git a/plugins/disabled-Zeroname-local/domainLookup.py b/plugins/disabled-Zeroname-local/domainLookup.py deleted file mode 100644 index 930168c0..00000000 --- a/plugins/disabled-Zeroname-local/domainLookup.py +++ /dev/null @@ -1,78 +0,0 @@ -from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException -import time, json, os, sys, re, socket - -# Connecting to RPC -def initRpc(config): - """Initialize Namecoin RPC""" - rpc_data = { - 'connect': '127.0.0.1', - 'port': '8336', - 'user': 'PLACEHOLDER', - 'password': 'PLACEHOLDER', - 'clienttimeout': '900' - } - try: - fptr = open(config, 'r') - lines = fptr.readlines() - fptr.close() - except: - return None # Or take some other appropriate action - - for line in lines: - if not line.startswith('rpc'): - continue - key_val = line.split(None, 1)[0] - (key, val) = key_val.split('=', 1) - if not key or not val: - continue - rpc_data[key[3:]] = val - - url = 'http://%(user)s:%(password)s@%(connect)s:%(port)s' % rpc_data - - return url, int(rpc_data['clienttimeout']) - -# Either returns domain's address or none if it doesn't exist -# Supports subdomains and .bit on the end -def lookupDomain(domain): - domain = domain.lower() - - #remove .bit on end - if domain[-4:] == ".bit": - domain = domain[0:-4] - - #check for subdomain - if domain.find(".") != -1: - subdomain = domain[0:domain.find(".")] - domain = domain[domain.find(".")+1:] - else: - subdomain = "" - - try: - domain_object = rpc.name_show("d/"+domain) - except: - #domain doesn't exist - return None - - domain_json = json.loads(domain_object["value"]) - - try: - domain_address = domain_json["zeronet"][subdomain] - except: - #domain exists but doesn't have any zeronet value - return None - - return domain_address - -# Loading config... 
- -# Check whether platform is on windows or linux -# On linux namecoin is installed under ~/.namecoin, while on on windows it is in %appdata%/Namecoin - -if sys.platform == "win32": - namecoin_location = os.getenv('APPDATA') + "/Namecoin/" -else: - namecoin_location = os.path.expanduser("~/.namecoin/") - -# Initialize rpc connection -rpc_auth, rpc_timeout = initRpc(namecoin_location + "namecoin.conf") -rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) diff --git a/requirements.txt b/requirements.txt index e5cfb71e..538a6dfc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,13 @@ -gevent>=1.1.0 +gevent==1.4.0; python_version <= "3.6" +greenlet==0.4.16; python_version <= "3.6" +gevent>=20.9.0; python_version >= "3.7" msgpack>=0.4.4 +base58 +merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev +rsa +PySocks>=1.6.8 +pyasn1 +websocket_client +gevent-ws +coincurve +maxminddb diff --git a/src/Config.py b/src/Config.py index aab299fd..a9208d55 100644 --- a/src/Config.py +++ b/src/Config.py @@ -3,29 +3,39 @@ import sys import os import locale import re -import ConfigParser +import configparser import logging import logging.handlers import stat +import time class Config(object): def __init__(self, argv): - self.version = "0.6.5" - self.rev = 3870 + self.version = "0.9.0" + self.rev = 4630 self.argv = argv self.action = None + self.test_parser = None self.pending_changes = {} self.need_restart = False self.keys_api_change_allowed = set([ "tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers", - "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external" + "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline", + "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" + ]) + self.keys_restart_need = set([ + "tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db" ]) - self.keys_restart_need = set(["tor", "fileserver_port", "fileserver_ip_type"]) self.start_dir = self.getStartDir() - self.config_file = "zeronet.conf" + self.config_file = self.start_dir + "/zeronet.conf" + self.data_dir = self.start_dir + "/data" + self.log_dir = self.start_dir + "/log" + self.openssl_lib_file = None + self.openssl_bin_file = None + self.trackers_file = False self.createParser() self.createArguments() @@ -46,20 +56,22 @@ class Config(object): def getStartDir(self): this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd") - if this_file.endswith("/Contents/Resources/core/src/Config.py"): + if "--start_dir" in self.argv: + start_dir = self.argv[self.argv.index("--start_dir") + 1] + elif this_file.endswith("/Contents/Resources/core/src/Config.py"): # Running as ZeroNet.app if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")): # Runnig from non-writeable directory, put data to Application Support - start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet").decode(sys.getfilesystemencoding()) + start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet") else: # Running from writeable directory put data next to .app - start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file).decode(sys.getfilesystemencoding()) + start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file) elif this_file.endswith("/core/src/Config.py"): # Running as exe or source is at Application Support 
directory, put var files to outside of core dir - start_dir = this_file.replace("/core/src/Config.py", "").decode(sys.getfilesystemencoding()) + start_dir = this_file.replace("/core/src/Config.py", "") elif this_file.endswith("usr/share/zeronet/src/Config.py"): # Running from non-writeable location, e.g., AppImage - start_dir = os.path.expanduser("~/ZeroNet").decode(sys.getfilesystemencoding()) + start_dir = os.path.expanduser("~/ZeroNet") else: start_dir = "." @@ -67,17 +79,15 @@ class Config(object): # Create command line arguments def createArguments(self): + from Crypt import CryptHash + access_key_default = CryptHash.random(24, "base64") # Used to allow restrited plugins when multiuser plugin is enabled trackers = [ - "zero://boot3rdez4rzn36x.onion:15441", - "zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443", # US/NY - "udp://tracker.coppersurfer.tk:6969", # DE - "udp://tracker.port443.xyz:6969", # UK - "udp://104.238.198.186:8000", # US/LA - "http://tracker2.itzmx.com:6961/announce", # US/LA "http://open.acgnxtracker.com:80/announce", # DE - "http://open.trackerlist.xyz:80/announce", # Cloudflare - "https://1.tracker.eu.org:443/announce", # Google App Engine - "zero://2602:ffc5::c5b2:5360:26312" # US/ATL + "http://tracker.bt4g.com:2095/announce", # Cloudflare + "http://tracker.files.fm:6969/announce", + "http://t.publictracker.xyz:6969/announce", + "https://tracker.lilithraws.cf:443/announce", + "https://tracker.babico.name.tr:443/announce", ] # Platform specific if sys.platform.startswith("win"): @@ -111,6 +121,8 @@ class Config(object): # SiteCreate action = self.subparsers.add_parser("siteCreate", help='Create a new site') + action.register('type', 'bool', self.strToBool) + action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True) # SiteNeedFile action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site') @@ -199,19 +211,27 @@ class Config(object): action = self.subparsers.add_parser("testConnection", help='Testing') action = self.subparsers.add_parser("testAnnounce", help='Testing') + self.test_parser = self.subparsers.add_parser("test", help='Run a test') + self.test_parser.add_argument('test_name', help='Test name', nargs="?") + # self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true') + # Config parameters self.parser.add_argument('--verbose', help='More detailed logging', action='store_true') self.parser.add_argument('--debug', help='Debug mode', action='store_true') - self.parser.add_argument('--silent', help='Disable logging to terminal output', action='store_true') + self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true') self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') + self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true') self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true') + self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path") self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path") self.parser.add_argument('--data_dir', help='Path of data directory', 
default=data_dir, metavar="path") + self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"]) + self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path") - self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR"]) + self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"]) self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"]) self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int) @@ -224,11 +244,14 @@ class Config(object): self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name') - self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D', + self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d', metavar='address') - self.parser.add_argument('--updatesite', help='Source code update site', default='1UPDatEDxnvHDo7TXvq6AEBARfNkyfxsp', + self.parser.add_argument('--updatesite', help='Source code update site', default='1Update8crprmciJHwp2WXqkx2c4iYp18', metavar='address') - self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit') + self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key') + self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source') + + self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit') self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit') self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit') self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit') @@ -240,15 +263,18 @@ class Config(object): self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"]) self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*') self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*') + self.parser.add_argument('--offline', help='Disable network communication', action='store_true') self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true') self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port') self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip') self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*') - self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path') + self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', 
metavar='path', nargs='*') self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable") - self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', - type='bool', choices=[True, False], default=use_openssl) + self.parser.add_argument('--use_libsecp256k1', help='Use Libsecp256k1 liblary for speedup', type='bool', choices=[True, False], default=True) + self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=True) + self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path") + self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path") self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true') self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true') self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true') @@ -267,6 +293,12 @@ class Config(object): self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification', type='bool', choices=[True, False], default=fix_float_decimals) self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed") + + self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int) + self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int) + self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int) + self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int) + self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual") self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, @@ -287,24 +319,27 @@ class Config(object): def loadTrackersFile(self): if not self.trackers_file: - return None - + self.trackers_file = ["trackers.txt", "{data_dir}/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt"] self.trackers = self.arguments.trackers[:] - try: - if self.trackers_file.startswith("/"): # Absolute - trackers_file_path = self.trackers_file - elif self.trackers_file.startswith("{data_dir}"): # Relative to data_dir - trackers_file_path = self.trackers_file.replace("{data_dir}", self.data_dir) - else: # Relative to zeronet.py - trackers_file_path = self.start_dir + "/" + self.trackers_file + for trackers_file in self.trackers_file: + try: + if trackers_file.startswith("/"): # Absolute + trackers_file_path = trackers_file + elif trackers_file.startswith("{data_dir}"): # Relative to data_dir + trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir) + else: # Relative to zeronet.py + trackers_file_path = self.start_dir + "/" + trackers_file - for line in open(trackers_file_path): - tracker = line.strip() - if "://" in tracker and tracker not in self.trackers: - self.trackers.append(tracker) - except Exception as err: - print "Error loading trackers file: %s" % err + if not os.path.exists(trackers_file_path): + continue + + for line in open(trackers_file_path): + tracker = line.strip() + if 
"://" in tracker and tracker not in self.trackers: + self.trackers.append(tracker) + except Exception as err: + print("Error loading trackers file: %s" % err) # Find arguments specified for current action def getActionArguments(self): @@ -316,7 +351,7 @@ class Config(object): # Try to find action from argv def getAction(self, argv): - actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions + actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0] # Valid actions found_action = False for action in actions: # See if any in argv if action in argv: @@ -345,8 +380,17 @@ class Config(object): valid_parameters.append(arg) return valid_parameters + plugin_parameters + def getParser(self, argv): + action = self.getAction(argv) + if not action: + return self.parser + else: + return self.subparsers.choices[action] + # Parse arguments from config file and command line def parse(self, silent=False, parse_config=True): + argv = self.argv[:] # Copy command line arguments + current_parser = self.getParser(argv) if silent: # Don't display messages or quit on unknown parameter original_print_message = self.parser._print_message original_exit = self.parser.exit @@ -354,11 +398,10 @@ class Config(object): def silencer(parser, function_name): parser.exited = True return None - self.parser.exited = False - self.parser._print_message = lambda *args, **kwargs: silencer(self.parser, "_print_message") - self.parser.exit = lambda *args, **kwargs: silencer(self.parser, "exit") + current_parser.exited = False + current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message") + current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit") - argv = self.argv[:] # Copy command line arguments self.parseCommandline(argv, silent) # Parse argv self.setAttributes() if parse_config: @@ -372,10 +415,10 @@ class Config(object): self.ip_local.append(self.fileserver_ip) if silent: # Restore original functions - if self.parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action + if current_parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action self.action = None - self.parser._print_message = original_print_message - self.parser.exit = original_exit + current_parser._print_message = original_print_message + current_parser.exit = original_exit self.loadTrackersFile() @@ -404,7 +447,7 @@ class Config(object): self.config_file = argv[argv.index("--config_file") + 1] # Load config file if os.path.isfile(self.config_file): - config = ConfigParser.ConfigParser(allow_no_value=True) + config = configparser.RawConfigParser(allow_no_value=True, strict=False) config.read(self.config_file) for section in config.sections(): for key, val in config.items(section): @@ -414,7 +457,7 @@ class Config(object): key = section + "_" + key if key == "open_browser": # Prefer config file value over cli argument - if "--%s" % key in argv: + while "--%s" % key in argv: pos = argv.index("--open_browser") del argv[pos:pos + 2] @@ -428,6 +471,16 @@ class Config(object): argv = argv[:1] + argv_extend + argv[1:] return argv + # Return command line value of given argument + def getCmdlineValue(self, key): + if key not in self.argv: + return None + argv_index = self.argv.index(key) + if argv_index == len(self.argv) - 1: # last arg, test not specified + return None + + return self.argv[argv_index + 1] + # Expose 
arguments as class attributes def setAttributes(self): # Set attributes from arguments @@ -436,8 +489,9 @@ class Config(object): for key, val in args.items(): if type(val) is list: val = val[:] - if key in ("data_dir", "log_dir"): - val = val.replace("\\", "/") + if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"): + if val: + val = val.replace("\\", "/") setattr(self, key, val) def loadPlugins(self): @@ -446,7 +500,11 @@ class Config(object): @PluginManager.acceptPlugins class ConfigPlugin(object): def __init__(self, config): + self.argv = config.argv self.parser = config.parser + self.subparsers = config.subparsers + self.test_parser = config.test_parser + self.getCmdlineValue = config.getCmdlineValue self.createArguments() def createArguments(self): @@ -467,7 +525,7 @@ class Config(object): for line in lines: if line.strip() == "[global]": global_line_i = i - if line.startswith(key + " ="): + if line.startswith(key + " =") or line == key: key_line_i = i i += 1 @@ -500,6 +558,7 @@ class Config(object): def getServerInfo(self): from Plugin import PluginManager + import main info = { "platform": sys.platform, @@ -519,10 +578,10 @@ class Config(object): } try: - info["ip_external"] = sys.modules["main"].file_server.port_opened - info["tor_enabled"] = sys.modules["main"].file_server.tor_manager.enabled - info["tor_status"] = sys.modules["main"].file_server.tor_manager.status - except: + info["ip_external"] = main.file_server.port_opened + info["tor_enabled"] = main.file_server.tor_manager.enabled + info["tor_status"] = main.file_server.tor_manager.status + except Exception: pass return info @@ -533,12 +592,15 @@ class Config(object): else: format = '%(name)s %(message)s' - if self.silent: - level = logging.ERROR - elif self.debug: - level = logging.DEBUG + if self.console_log_level == "default": + if self.silent: + level = logging.ERROR + elif self.debug: + level = logging.DEBUG + else: + level = logging.INFO else: - level = logging.INFO + level = logging.getLevelName(self.console_log_level) console_logger = logging.StreamHandler() console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S")) @@ -550,27 +612,37 @@ class Config(object): log_file_path = "%s/debug.log" % self.log_dir else: log_file_path = "%s/cmd.log" % self.log_dir + if self.log_rotate == "off": - file_logger = logging.FileHandler(log_file_path) + file_logger = logging.FileHandler(log_file_path, "w", "utf-8") else: when_names = {"weekly": "w", "daily": "d", "hourly": "h"} file_logger = logging.handlers.TimedRotatingFileHandler( - log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count + log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count, + encoding="utf8" ) - file_logger.doRollover() # Always start with empty log file + + if os.path.isfile(log_file_path): + file_logger.doRollover() # Always start with empty log file file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s')) file_logger.setLevel(logging.getLevelName(self.log_level)) logging.getLogger('').setLevel(logging.getLevelName(self.log_level)) logging.getLogger('').addHandler(file_logger) - def initLogging(self): + def initLogging(self, console_logging=None, file_logging=None): + if console_logging == None: + console_logging = self.console_log_level != "off" + + if file_logging == None: + file_logging = self.log_level != "off" + # Create necessary files and dirs if not os.path.isdir(self.log_dir): 
os.mkdir(self.log_dir) try: os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) except Exception as err: - print "Can't change permission of %s: %s" % (self.log_dir, err) + print("Can't change permission of %s: %s" % (self.log_dir, err)) # Make warning hidden from console logging.WARNING = 15 # Don't display warnings if not in debug mode @@ -578,7 +650,26 @@ class Config(object): logging.getLogger('').name = "-" # Remove root prefix - self.initConsoleLogger() - self.initFileLogger() + self.error_logger = ErrorLogHandler() + self.error_logger.setLevel(logging.getLevelName("ERROR")) + logging.getLogger('').addHandler(self.error_logger) + + if console_logging: + self.initConsoleLogger() + if file_logging: + self.initFileLogger() + + +class ErrorLogHandler(logging.StreamHandler): + def __init__(self): + self.lines = [] + return super().__init__() + + def emit(self, record): + self.lines.append([time.time(), record.levelname, self.format(record)]) + + def onNewRecord(self, record): + pass + config = Config(sys.argv) diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py index 4edd33a2..22bcf29c 100644 --- a/src/Connection/Connection.py +++ b/src/Connection/Connection.py @@ -1,10 +1,7 @@ import socket import time -import random import gevent -import msgpack -import msgpack.fallback try: from gevent.coros import RLock except: @@ -12,17 +9,17 @@ except: from Config import config from Debug import Debug -from util import StreamingMsgpack +from util import Msgpack from Crypt import CryptConnection from util import helper class Connection(object): __slots__ = ( - "sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "req_id", "ip_type", + "sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "unpacker_bytes", "req_id", "ip_type", "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "handshake_time", "last_recv_time", "is_private_ip", "is_tracker_connection", "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", "send_lock", - "last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "updateName", "waiting_requests", "waiting_streams" + "last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "waiting_requests", "waiting_streams" ) def __init__(self, server, ip, port, sock=None, target_onion=None, is_tracker_connection=False): @@ -47,6 +44,7 @@ class Connection(object): self.server = server self.unpacker = None # Stream incoming socket messages here + self.unpacker_bytes = 0 # How many bytes the unpacker received self.req_id = 0 # Last request id self.handshake = {} # Handshake info got from peer self.crypt = None # Connection encryption method @@ -103,7 +101,7 @@ class Connection(object): return "<%s>" % self.__str__() def log(self, text): - self.server.log.debug("%s > %s" % (self.name, text.decode("utf8", "ignore"))) + self.server.log.debug("%s > %s" % (self.name, text)) def getValidSites(self): return [key for key, val in self.server.tor_manager.site_onions.items() if val == self.target_onion] @@ -127,11 +125,11 @@ class Connection(object): self.sock = self.server.tor_manager.createSocket(self.ip, self.port) elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local: raise Exception("Can't connect to local IPs in Tor: always mode") - 
elif config.trackers_proxy != "disable" and self.is_tracker_connection: + elif config.trackers_proxy != "disable" and config.tor != "always" and self.is_tracker_connection: if config.trackers_proxy == "tor": self.sock = self.server.tor_manager.createSocket(self.ip, self.port) else: - from lib.PySocks import socks + import socks self.sock = socks.socksocket() proxy_ip, proxy_port = config.trackers_proxy.split(":") self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port)) @@ -163,17 +161,18 @@ class Connection(object): self.sock.do_handshake() self.crypt = "tls-rsa" self.sock_wrapped = True - except Exception, err: + except Exception as err: if not config.force_encryption: - self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip)) + self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err))) self.server.broken_ssl_ips[self.ip] = True self.sock.close() + self.crypt = None self.sock = self.createSocket() self.sock.settimeout(30) self.sock.connect(sock_address) # Detect protocol - self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo(), "random": "A" * random.randint(0, 1024)}) + self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo()}) event_connected = self.event_connected gevent.spawn(self.messageLoop) connect_res = event_connected.get() # Wait for handshake @@ -190,15 +189,22 @@ class Connection(object): self.type = "in" if self.ip not in config.ip_local: # Clearnet: Check implicit SSL try: - if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16": + first_byte = sock.recv(1, gevent.socket.MSG_PEEK) + if first_byte == b"\x16": self.log("Crypt in connection using implicit SSL") self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True) self.sock_wrapped = True self.crypt = "tls-rsa" - except Exception, err: + except Exception as err: self.log("Socket peek error: %s" % Debug.formatException(err)) self.messageLoop() + def getMsgpackUnpacker(self): + if self.handshake and self.handshake.get("use_bin_type"): + return Msgpack.getUnpacker(fallback=True, decode=False) + else: # Backward compatibility for <0.7.0 + return Msgpack.getUnpacker(fallback=True, decode=True) + # Message loop for connection def messageLoop(self): if not self.sock: @@ -209,7 +215,7 @@ class Connection(object): self.connected = True buff_len = 0 req_len = 0 - unpacker_bytes = 0 + self.unpacker_bytes = 0 try: while not self.closed: @@ -226,15 +232,15 @@ class Connection(object): req_len += buff_len if not self.unpacker: - self.unpacker = msgpack.fallback.Unpacker() - unpacker_bytes = 0 + self.unpacker = self.getMsgpackUnpacker() + self.unpacker_bytes = 0 self.unpacker.feed(buff) - unpacker_bytes += buff_len + self.unpacker_bytes += buff_len while True: try: - message = self.unpacker.next() + message = next(self.unpacker) except StopIteration: break if not type(message) is dict: @@ -258,10 +264,10 @@ class Connection(object): # Handle message if "stream_bytes" in message: - buff_left = self.handleStream(message, self.unpacker, buff, unpacker_bytes) - self.unpacker = msgpack.fallback.Unpacker() + buff_left = self.handleStream(message, buff) + self.unpacker = self.getMsgpackUnpacker() self.unpacker.feed(buff_left) - unpacker_bytes = len(buff_left) + self.unpacker_bytes = len(buff_left) if config.debug_socket: self.log("Start new unpacker with buff_left: %r" % buff_left) else: @@ -275,19 +281,23 @@ class Connection(object): self.server.stat_recv["error: %s" % err]["num"] += 1 
self.close("MessageLoop ended (closed: %s)" % self.closed) # MessageLoop ended, close connection + def getUnpackerUnprocessedBytesNum(self): + if "tell" in dir(self.unpacker): + bytes_num = self.unpacker_bytes - self.unpacker.tell() + else: + bytes_num = self.unpacker._fb_buf_n - self.unpacker._fb_buf_o + return bytes_num + # Stream socket directly to a file - def handleStream(self, message, unpacker, buff, unpacker_bytes): + def handleStream(self, message, buff): stream_bytes_left = message["stream_bytes"] file = self.waiting_streams[message["to"]] - if "tell" in dir(unpacker): - unpacker_unprocessed_bytes = unpacker_bytes - unpacker.tell() - else: - unpacker_unprocessed_bytes = unpacker._fb_buf_n - unpacker._fb_buf_o + unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum() - if unpacker_unprocessed_bytes: # Found stream bytes in unpacker - unpacker_stream_bytes = min(unpacker_unprocessed_bytes, stream_bytes_left) - buff_stream_start = len(buff) - unpacker_unprocessed_bytes + if unprocessed_bytes_num: # Found stream bytes in unpacker + unpacker_stream_bytes = min(unprocessed_bytes_num, stream_bytes_left) + buff_stream_start = len(buff) - unprocessed_bytes_num file.write(buff[buff_stream_start:buff_stream_start + unpacker_stream_bytes]) stream_bytes_left -= unpacker_stream_bytes else: @@ -296,7 +306,7 @@ class Connection(object): if config.debug_socket: self.log( "Starting stream %s: %s bytes (%s from unpacker, buff size: %s, unprocessed: %s)" % - (message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unpacker_unprocessed_bytes) + (message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unprocessed_bytes_num) ) try: @@ -315,7 +325,7 @@ class Connection(object): self.incomplete_buff_recv += 1 self.bytes_recv += buff_len self.server.bytes_recv += buff_len - except Exception, err: + except Exception as err: self.log("Stream read error: %s" % Debug.formatException(err)) if config.debug_socket: @@ -329,13 +339,15 @@ class Connection(object): if unpacker_stream_bytes: return buff[buff_stream_start + unpacker_stream_bytes:] else: - return "" + return b"" # My handshake info def getHandshakeInfo(self): # No TLS for onion connections if self.ip_type == "onion": crypt_supported = [] + elif self.ip in self.server.broken_ssl_ips: + crypt_supported = [] else: crypt_supported = CryptConnection.manager.crypt_supported # No peer id for onion connections @@ -352,6 +364,7 @@ class Connection(object): handshake = { "version": config.version, "protocol": "v2", + "use_bin_type": True, "peer_id": peer_id, "fileserver_port": self.server.port, "port_opened": self.server.port_opened.get(self.ip_type, None), @@ -389,8 +402,20 @@ class Connection(object): else: self.port = int(handshake["fileserver_port"]) # Set peer fileserver port + if handshake.get("use_bin_type") and self.unpacker: + unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum() + self.log("Changing unpacker to bin type (unprocessed bytes: %s)" % unprocessed_bytes_num) + unprocessed_bytes = self.unpacker.read_bytes(unprocessed_bytes_num) + self.unpacker = self.getMsgpackUnpacker() # Create new unpacker for different msgpack type + self.unpacker_bytes = 0 + if unprocessed_bytes: + self.unpacker.feed(unprocessed_bytes) + # Check if we can encrypt the connection if handshake.get("crypt_supported") and self.ip not in self.server.broken_ssl_ips: + if type(handshake["crypt_supported"][0]) is bytes: + handshake["crypt_supported"] = [item.decode() for item in handshake["crypt_supported"]] # Backward 
compatibility + if self.ip_type == "onion" or self.ip in config.ip_local: crypt = None elif handshake.get("crypt"): # Recommended crypt by server @@ -415,10 +440,7 @@ class Connection(object): # Handle incoming message def handleMessage(self, message): - try: - cmd = message["cmd"] - except TypeError, AttributeError: - cmd = None + cmd = message["cmd"] self.last_message_time = time.time() self.last_cmd_recv = cmd @@ -456,12 +478,6 @@ class Connection(object): self.handleHandshake(message) else: self.server.handleRequest(self, message) - else: # Old style response, no req_id defined - self.log("Unknown message, waiting: %s" % self.waiting_requests.keys()) - if self.waiting_requests: - last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true - self.waiting_requests[last_req_id]["evt"].set(message) - del self.waiting_requests[last_req_id] # Remove from waiting request # Incoming handshake set request def handleHandshake(self, message): @@ -477,9 +493,9 @@ class Connection(object): try: self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin) self.sock_wrapped = True - except Exception, err: + except Exception as err: if not config.force_encryption: - self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip)) + self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err))) self.server.broken_ssl_ips[self.ip] = True self.close("Broken ssl") @@ -514,20 +530,20 @@ class Connection(object): self.server.stat_sent[stat_key]["num"] += 1 if streaming: with self.send_lock: - bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall) + bytes_sent = Msgpack.stream(message, self.sock.sendall) self.bytes_sent += bytes_sent self.server.bytes_sent += bytes_sent self.server.stat_sent[stat_key]["bytes"] += bytes_sent message = None else: - data = msgpack.packb(message) + data = Msgpack.pack(message) self.bytes_sent += len(data) self.server.bytes_sent += len(data) self.server.stat_sent[stat_key]["bytes"] += len(data) message = None with self.send_lock: self.sock.sendall(data) - except Exception, err: + except Exception as err: self.close("Send error: %s (cmd: %s)" % (err, stat_key)) return False self.last_sent_time = time.time() @@ -578,9 +594,9 @@ class Connection(object): with gevent.Timeout(10.0, False): try: response = self.request("ping") - except Exception, err: + except Exception as err: self.log("Ping error: %s" % Debug.formatException(err)) - if response and "body" in response and response["body"] == "Pong!": + if response and "body" in response and response["body"] == b"Pong!": self.last_ping_delay = time.time() - s return True else: @@ -609,7 +625,7 @@ class Connection(object): if self.sock: self.sock.shutdown(gevent.socket.SHUT_WR) self.sock.close() - except Exception, err: + except Exception as err: if config.debug_socket: self.log("Close error: %s" % err) diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py index 15274a54..c9048398 100644 --- a/src/Connection/ConnectionServer.py +++ b/src/Connection/ConnectionServer.py @@ -12,7 +12,7 @@ from gevent.pool import Pool import util from util import helper from Debug import Debug -from Connection import Connection +from .Connection import Connection from Config import config from Crypt import CryptConnection from Crypt import CryptHash @@ -30,7 +30,9 @@ class ConnectionServer(object): port = 15441 self.ip = ip self.port = port - self.last_connection_id 
= 1 # Connection id incrementer + self.last_connection_id = 0 # Connection id incrementer + self.last_connection_id_current_version = 0 # Connection id incrementer for current client version + self.last_connection_id_supported_version = 0 # Connection id incrementer for last supported version self.log = logging.getLogger("ConnServer") self.port_opened = {} self.peer_blacklist = SiteManager.peer_blacklist @@ -46,6 +48,8 @@ class ConnectionServer(object): self.stream_server = None self.stream_server_proxy = None self.running = False + self.stopping = False + self.thread_checker = None self.stat_recv = defaultdict(lambda: defaultdict(int)) self.stat_sent = defaultdict(lambda: defaultdict(int)) @@ -76,6 +80,8 @@ class ConnectionServer(object): self.handleRequest = request_handler def start(self, check_connections=True): + if self.stopping: + return False self.running = True if check_connections: self.thread_checker = gevent.spawn(self.checkConnections) @@ -94,24 +100,41 @@ class ConnectionServer(object): self.stream_server = StreamServer( (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 ) - except Exception, err: + except Exception as err: self.log.info("StreamServer create error: %s" % Debug.formatException(err)) def listen(self): + if not self.running: + return None + if self.stream_server_proxy: gevent.spawn(self.listenProxy) try: self.stream_server.serve_forever() - except Exception, err: + except Exception as err: self.log.info("StreamServer listen error: %s" % err) + return False + self.log.debug("Stopped.") def stop(self): - self.log.debug("Stopping") + self.log.debug("Stopping %s" % self.stream_server) + self.stopping = True self.running = False + if self.thread_checker: + gevent.kill(self.thread_checker) if self.stream_server: self.stream_server.stop() + def closeConnections(self): + self.log.debug("Closing all connection: %s" % len(self.connections)) + for connection in self.connections[:]: + connection.close("Close all connections") + def handleIncomingConnection(self, sock, addr): + if config.offline: + sock.close() + return False + ip, port = addr[0:2] ip = ip.lower() if ip.startswith("::ffff:"): # IPv6 to IPv4 mapping @@ -134,6 +157,11 @@ class ConnectionServer(object): connection = Connection(self, ip, port, sock) self.connections.append(connection) + rev = connection.handshake.get("rev", 0) + if rev >= 4560: + self.last_connection_id_supported_version += 1 + if rev == config.rev: + self.last_connection_id_current_version += 1 if ip not in config.ip_local: self.ips[ip] = connection connection.handleIncomingConnection(sock) @@ -178,7 +206,7 @@ class ConnectionServer(object): return connection # No connection found - if create: # Allow to create new connection if not found + if create and not config.offline: # Allow to create new connection if not found if port == 0: raise Exception("This peer is not connectable") @@ -198,8 +226,14 @@ class ConnectionServer(object): if not succ: connection.close("Connection event return error") raise Exception("Connection event return error") + else: + rev = connection.handshake.get("rev", 0) + if rev >= 4560: + self.last_connection_id_supported_version += 1 + if rev == config.rev: + self.last_connection_id_current_version += 1 - except Exception, err: + except Exception as err: connection.close("%s Connect error: %s" % (ip, Debug.formatException(err))) raise err @@ -227,11 +261,10 @@ class ConnectionServer(object): def checkConnections(self): run_i = 0 + time.sleep(15) while self.running: run_i += 1 - 
time.sleep(15) # Check every minute self.ip_incoming = {} # Reset connected ips counter - self.broken_ssl_ips = {} # Reset broken ssl peerids count last_message_time = 0 s = time.time() for connection in self.connections[:]: # Make a copy @@ -307,6 +340,8 @@ class ConnectionServer(object): if time.time() - s > 0.01: self.log.debug("Connection cleanup in %.3fs" % (time.time() - s)) + + time.sleep(15) self.log.debug("Checkconnections ended") @util.Noparallel(blocking=False) @@ -344,8 +379,8 @@ class ConnectionServer(object): for connection in self.connections if connection.handshake.get("time") and connection.last_ping_delay ]) - if len(corrections) < 6: + if len(corrections) < 9: return 0.0 - mid = len(corrections) / 2 - 1 + mid = int(len(corrections) / 2 - 1) median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3 return median diff --git a/src/Connection/__init__.py b/src/Connection/__init__.py index 5bd29c6e..d419a3f0 100644 --- a/src/Connection/__init__.py +++ b/src/Connection/__init__.py @@ -1,2 +1,2 @@ -from ConnectionServer import ConnectionServer -from Connection import Connection +from .ConnectionServer import ConnectionServer +from .Connection import Connection diff --git a/src/Content/ContentDb.py b/src/Content/ContentDb.py index 307b47bb..f284581e 100644 --- a/src/Content/ContentDb.py +++ b/src/Content/ContentDb.py @@ -1,7 +1,6 @@ -import time import os -from Db import Db +from Db.Db import Db, DbTableError from Config import config from Plugin import PluginManager from Debug import Debug @@ -12,21 +11,29 @@ class ContentDb(Db): def __init__(self, path): Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) self.foreign_keys = True + + def init(self): try: self.schema = self.getSchema() - self.checkTables() + try: + self.checkTables() + except DbTableError: + pass self.log.debug("Checking foreign keys...") foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone() if foreign_key_error: raise Exception("Database foreign key error: %s" % foreign_key_error) - except Exception, err: + except Exception as err: self.log.error("Error loading content.db: %s, rebuilding..." 
% Debug.formatException(err)) self.close() - os.unlink(path) # Remove and try again - Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) + os.unlink(self.db_path) # Remove and try again + Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, self.db_path) self.foreign_keys = True self.schema = self.getSchema() - self.checkTables() + try: + self.checkTables() + except DbTableError: + pass self.site_ids = {} self.sites = {} @@ -95,8 +102,8 @@ class ContentDb(Db): def setContent(self, site, inner_path, content, size=0): self.insertOrUpdate("content", { "size": size, - "size_files": sum([val["size"] for key, val in content.get("files", {}).iteritems()]), - "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).iteritems()]), + "size_files": sum([val["size"] for key, val in content.get("files", {}).items()]), + "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]), "modified": int(content.get("modified", 0)) }, { "site_id": self.site_ids.get(site.address, 0), @@ -149,6 +156,7 @@ def getContentDb(path=None): path = "%s/content.db" % config.data_dir if path not in content_dbs: content_dbs[path] = ContentDb(path) + content_dbs[path].init() return content_dbs[path] getContentDb() # Pre-connect to default one diff --git a/src/Content/ContentDbDict.py b/src/Content/ContentDbDict.py index b47a15a3..01df0427 100644 --- a/src/Content/ContentDbDict.py +++ b/src/Content/ContentDbDict.py @@ -1,7 +1,7 @@ import time import os -import ContentDb +from . import ContentDb from Debug import Debug from Config import config @@ -127,29 +127,29 @@ if __name__ == "__main__": s_mem = process.memory_info()[0] / float(2 ** 20) root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27" contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root) - print "Init len", len(contents) + print("Init len", len(contents)) s = time.time() for dir_name in os.listdir(root + "/data/users/")[0:8000]: contents["data/users/%s/content.json" % dir_name] - print "Load: %.3fs" % (time.time() - s) + print("Load: %.3fs" % (time.time() - s)) s = time.time() found = 0 - for key, val in contents.iteritems(): + for key, val in contents.items(): found += 1 assert key assert val - print "Found:", found - print "Iteritem: %.3fs" % (time.time() - s) + print("Found:", found) + print("Iteritem: %.3fs" % (time.time() - s)) s = time.time() found = 0 - for key in contents.keys(): + for key in list(contents.keys()): found += 1 assert key in contents - print "In: %.3fs" % (time.time() - s) + print("In: %.3fs" % (time.time() - s)) - print "Len:", len(contents.values()), len(contents.keys()) + print("Len:", len(list(contents.values())), len(list(contents.keys()))) - print "Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem + print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem) diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py index e2e2860a..623cc707 100644 --- a/src/Content/ContentManager.py +++ b/src/Content/ContentManager.py @@ -3,6 +3,8 @@ import time import re import os import copy +import base64 +import sys import gevent @@ -13,7 +15,7 @@ from util import helper from util import Diff from util import SafeRe from Peer import PeerHashfield -from ContentDbDict import ContentDbDict +from .ContentDbDict import ContentDbDict from Plugin import PluginManager @@ -38,13 +40,13 @@ class ContentManager(object): # Load all content.json files def loadContents(self): if len(self.contents) == 0: - self.log.debug("ContentDb 
not initialized, load files from filesystem") + self.log.info("ContentDb not initialized, load files from filesystem...") self.loadContent(add_bad_files=False, delete_removed_files=False) self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() # Load hashfield cache if "hashfield" in self.site.settings.get("cache", {}): - self.hashfield.fromstring(self.site.settings["cache"]["hashfield"].decode("base64")) + self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"])) del self.site.settings["cache"]["hashfield"] elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0: self.site.storage.updateBadFiles() # No hashfield cache created yet @@ -52,6 +54,19 @@ class ContentManager(object): self.contents.db.initSite(self.site) + def getFileChanges(self, old_files, new_files): + deleted = {key: val for key, val in old_files.items() if key not in new_files} + deleted_hashes = {val.get("sha512"): key for key, val in old_files.items() if key not in new_files} + added = {key: val for key, val in new_files.items() if key not in old_files} + renamed = {} + for relative_path, node in added.items(): + hash = node.get("sha512") + if hash in deleted_hashes: + relative_path_old = deleted_hashes[hash] + renamed[relative_path_old] = relative_path + del(deleted[relative_path_old]) + return list(deleted), renamed + # Load content.json to self.content # Return: Changed files ["index.html", "data/messages.json"], Deleted files ["old.jpg"] def loadContent(self, content_inner_path="content.json", add_bad_files=True, delete_removed_files=True, load_includes=True, force=False): @@ -68,13 +83,13 @@ class ContentManager(object): for line in open(content_path): if '"modified"' not in line: continue - match = re.search("([0-9\.]+),$", line.strip(" \r\n")) + match = re.search(r"([0-9\.]+),$", line.strip(" \r\n")) if match and float(match.group(1)) <= old_content.get("modified", 0): self.log.debug("%s loadContent same json file, skipping" % content_inner_path) return [], [] - new_content = json.load(open(content_path)) - except Exception, err: + new_content = self.site.storage.loadJson(content_inner_path) + except Exception as err: self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err))) return [], [] else: @@ -86,7 +101,7 @@ class ContentManager(object): changed = [] deleted = [] # Check changed - for relative_path, info in new_content.get("files", {}).iteritems(): + for relative_path, info in new_content.get("files", {}).items(): if "sha512" in info: hash_type = "sha512" else: # Backward compatibility @@ -101,7 +116,7 @@ class ContentManager(object): changed.append(content_inner_dir + relative_path) # Check changed optional files - for relative_path, info in new_content.get("files_optional", {}).iteritems(): + for relative_path, info in new_content.get("files_optional", {}).items(): file_inner_path = content_inner_dir + relative_path new_hash = info["sha512"] if old_content and old_content.get("files_optional", {}).get(relative_path): @@ -115,8 +130,8 @@ class ContentManager(object): self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"]) self.optionalDelete(file_inner_path) self.log.debug("Deleted changed optional file: %s" % file_inner_path) - except Exception, err: - self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err))) + except Exception as err: + self.log.warning("Error deleting file %s: %s" % (file_inner_path, 
Debug.formatException(err))) else: # The file is not in the old content if self.site.isDownloadable(file_inner_path): changed.append(file_inner_path) # Download new file @@ -133,13 +148,26 @@ class ContentManager(object): **new_content.get("files_optional", {}) ) - deleted = [key for key in old_files if key not in new_files] + deleted, renamed = self.getFileChanges(old_files, new_files) + + for relative_path_old, relative_path_new in renamed.items(): + self.log.debug("Renaming: %s -> %s" % (relative_path_old, relative_path_new)) + if relative_path_new in new_content.get("files_optional", {}): + self.optionalRenamed(content_inner_dir + relative_path_old, content_inner_dir + relative_path_new) + if self.site.storage.isFile(relative_path_old): + try: + self.site.storage.rename(relative_path_old, relative_path_new) + if relative_path_new in changed: + changed.remove(relative_path_new) + self.log.debug("Renamed: %s -> %s" % (relative_path_old, relative_path_new)) + except Exception as err: + self.log.warning("Error renaming file: %s -> %s %s" % (relative_path_old, relative_path_new, err)) + if deleted and not self.site.settings.get("own"): # Deleting files that no longer in content.json for file_relative_path in deleted: file_inner_path = content_inner_dir + file_relative_path try: - # Check if the deleted file is optional if old_content.get("files_optional") and old_content["files_optional"].get(file_relative_path): self.optionalDelete(file_inner_path) @@ -151,7 +179,7 @@ class ContentManager(object): self.site.storage.delete(file_inner_path) self.log.debug("Deleted file: %s" % file_inner_path) - except Exception, err: + except Exception as err: self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err))) # Cleanup empty dirs @@ -165,7 +193,7 @@ class ContentManager(object): self.site.storage.deleteDir(root_inner_path) # Remove from tree dict to reflect changed state tree[os.path.dirname(root)][0].remove(os.path.basename(root)) - except Exception, err: + except Exception as err: self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err)) # Check archived @@ -175,12 +203,12 @@ class ContentManager(object): self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived))) archived_changed = { key: date_archived - for key, date_archived in new_archived.iteritems() + for key, date_archived in new_archived.items() if old_archived.get(key) != new_archived[key] } if archived_changed: self.log.debug("Archived changed: %s" % archived_changed) - for archived_dirname, date_archived in archived_changed.iteritems(): + for archived_dirname, date_archived in archived_changed.items(): archived_inner_path = content_inner_dir + archived_dirname + "/content.json" if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived: self.removeContent(archived_inner_path) @@ -204,7 +232,7 @@ class ContentManager(object): # Remove archived files from download queue num_removed_bad_files = 0 - for bad_file in self.site.bad_files.keys(): + for bad_file in list(self.site.bad_files.keys()): if bad_file.endswith("content.json"): del self.site.bad_files[bad_file] num_removed_bad_files += 1 @@ -217,7 +245,7 @@ class ContentManager(object): # Load includes if load_includes and "includes" in new_content: - for relative_path, info in new_content["includes"].items(): + for relative_path, info in list(new_content["includes"].items()): include_inner_path = content_inner_dir + relative_path if 
self.site.storage.isFile(include_inner_path): # Content.json exists, load it include_changed, include_deleted = self.loadContent( @@ -255,7 +283,7 @@ class ContentManager(object): self.has_optional_files = True # Update the content self.contents[content_inner_path] = new_content - except Exception, err: + except Exception as err: self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err))) return [], [] # Content.json parse error @@ -282,7 +310,7 @@ class ContentManager(object): content.get("files", {}), **content.get("files_optional", {}) ) - except Exception, err: + except Exception as err: self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err))) files = {} files["content.json"] = True @@ -292,16 +320,16 @@ class ContentManager(object): try: self.site.storage.delete(file_inner_path) self.log.debug("Deleted file: %s" % file_inner_path) - except Exception, err: + except Exception as err: self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) try: self.site.storage.deleteDir(inner_dir) - except Exception, err: + except Exception as err: self.log.debug("Error deleting dir %s: %s" % (inner_dir, err)) try: del self.contents[inner_path] - except Exception, err: + except Exception as err: self.log.debug("Error key from contents: %s" % inner_path) # Get total size of site @@ -317,14 +345,14 @@ class ContentManager(object): return [] back = [inner_path] content_inner_dir = helper.getDirname(inner_path) - for relative_path in self.contents[inner_path].get("includes", {}).keys(): + for relative_path in list(self.contents[inner_path].get("includes", {}).keys()): include_inner_path = content_inner_dir + relative_path back += self.listContents(include_inner_path) return back # Returns if file with the given modification date is archived or not def isArchived(self, inner_path, modified): - match = re.match("(.*)/(.*?)/", inner_path) + match = re.match(r"(.*)/(.*?)/", inner_path) if not match: return False user_contents_inner_path = match.group(1) + "/content.json" @@ -333,7 +361,7 @@ class ContentManager(object): file_info = self.getFileInfo(user_contents_inner_path) if file_info: time_archived_before = file_info.get("archived_before", 0) - time_directory_archived = file_info.get("archived", {}).get(relative_directory) + time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0) if modified <= time_archived_before or modified <= time_directory_archived: return True else: @@ -402,7 +430,7 @@ class ContentManager(object): back = content["user_contents"] content_inner_path_dir = helper.getDirname(content_inner_path) relative_content_path = inner_path[len(content_inner_path_dir):] - user_auth_address_match = re.match("([A-Za-z0-9]+)/.*", relative_content_path) + user_auth_address_match = re.match(r"([A-Za-z0-9]+)/.*", relative_content_path) if user_auth_address_match: user_auth_address = user_auth_address_match.group(1) back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address) @@ -468,9 +496,9 @@ class ContentManager(object): # Delivered for directory if "inner_path" in parent_content: parent_content_dir = helper.getDirname(parent_content["inner_path"]) - user_address = re.match("([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1) + user_address = re.match(r"([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1) else: - user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) + user_address = 
re.match(r".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) try: if not content: @@ -493,11 +521,11 @@ class ContentManager(object): banned = False if "signers" in rules: rules["signers"] = rules["signers"][:] # Make copy of the signers - for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules + for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()): # Regexp rules if not SafeRe.match(permission_pattern, user_urn): continue # Rule is not valid for user # Update rules if its better than current recorded ones - for key, val in permission_rules.iteritems(): + for key, val in permission_rules.items(): if key not in rules: if type(val) is list: rules[key] = val[:] # Make copy @@ -567,20 +595,27 @@ class ContentManager(object): return back def isValidRelativePath(self, relative_path): - if ".." in relative_path: + if ".." in relative_path.replace("\\", "/").split("/"): return False elif len(relative_path) > 255: return False + elif relative_path[0] in ("/", "\\"): # Starts with + return False + elif relative_path[-1] in (".", " "): # Ends with + return False + elif re.match(r".*(^|/)(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]|CONOUT\$|CONIN\$)(\.|/|$)", relative_path, re.IGNORECASE): # Protected on Windows + return False else: - return re.match("^[a-z\[\]\(\) A-Z0-9~_@=\.\+-/]+$", relative_path) + return re.match(r"^[^\x00-\x1F\"*:<>?\\|]+$", relative_path) def sanitizePath(self, inner_path): - return re.sub("[^a-z\[\]\(\) A-Z0-9_@=\.\+-/]", "", inner_path) + return re.sub("[\x00-\x1F\"*:<>?\\|]", "", inner_path) # Hash files in directory def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None): files_node = {} files_optional_node = {} + db_inner_path = self.site.storage.getDbFile() if dir_inner_path and not self.isValidRelativePath(dir_inner_path): ignored = True self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path) @@ -596,7 +631,7 @@ class ContentManager(object): elif not self.isValidRelativePath(file_relative_path): ignored = True self.log.error("- [ERROR] Invalid filename: %s" % file_relative_path) - elif dir_inner_path == "" and file_relative_path == self.site.storage.getDbFile(): + elif dir_inner_path == "" and db_inner_path and file_relative_path.startswith(db_inner_path): ignored = True elif optional_pattern and SafeRe.match(optional_pattern, file_relative_path): optional = True @@ -649,7 +684,7 @@ class ContentManager(object): if extend: # Add extend keys if not exists - for key, val in extend.items(): + for key, val in list(extend.items()): if not content.get(key): content[key] = val self.log.info("Extending content.json with: %s" % key) @@ -664,14 +699,14 @@ class ContentManager(object): ) if not remove_missing_optional: - for file_inner_path, file_details in content.get("files_optional", {}).iteritems(): + for file_inner_path, file_details in content.get("files_optional", {}).items(): if file_inner_path not in files_optional_node: files_optional_node[file_inner_path] = file_details # Find changed files files_merged = files_node.copy() files_merged.update(files_optional_node) - for file_relative_path, file_details in files_merged.iteritems(): + for file_relative_path, file_details in files_merged.items(): old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512") new_hash = files_merged[file_relative_path]["sha512"] if old_hash != new_hash: @@ -692,7 +727,6 @@ class ContentManager(object): elif "files_optional" in new_content: del 
new_content["files_optional"] - new_content["modified"] = int(time.time()) # Add timestamp if inner_path == "content.json": new_content["zeronet_version"] = config.version new_content["signs_required"] = content.get("signs_required", 1) @@ -712,9 +746,11 @@ class ContentManager(object): ) self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers)) + signs_required = 1 if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key, then sign the valid signers - signers_data = "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)) + signs_required = new_content["signs_required"] + signers_data = "%s:%s" % (signs_required, ",".join(valid_signers)) new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey) if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none") @@ -722,15 +758,32 @@ class ContentManager(object): self.log.info("Signing %s..." % inner_path) if "signs" in new_content: - del(new_content["signs"]) # Delete old signs + # del(new_content["signs"]) # Delete old signs + old_signs_content = new_content["signs"] + del(new_content["signs"]) + else: + old_signs_content = None if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility) - sign_content = json.dumps(new_content, sort_keys=True) + if signs_required > 1: + has_valid_sign = False + sign_content = json.dumps(new_content, sort_keys=True) + for signer in valid_signers: + res = CryptBitcoin.verify(sign_content,signer,old_signs_content[signer]); + print(res) + if res: + has_valid_sign = has_valid_sign or res + if has_valid_sign: + new_content["modified"] = content["modified"] + sign_content = json.dumps(new_content, sort_keys=True) + else: + new_content["modified"] = int(time.time()) # Add timestamp + sign_content = json.dumps(new_content, sort_keys=True) sign = CryptBitcoin.sign(sign_content, privatekey) # new_content["signs"] = content.get("signs", {}) # TODO: Multisig if sign: # If signing is successful (not an old address) - new_content["signs"] = {} + new_content["signs"] = old_signs_content or {} new_content["signs"][privatekey_address] = sign self.verifyContent(inner_path, new_content) @@ -765,11 +818,16 @@ class ContentManager(object): # Return: The required number of valid signs for the content.json def getSignsRequired(self, inner_path, content=None): - return 1 # Todo: Multisig + if not content: + return 1 + return content.get("signs_required", 1) + + def verifyCertSign(self, user_address, user_auth_type, user_name, issuer_address, sign): + from Crypt import CryptBitcoin + cert_subject = "%s#%s/%s" % (user_address, user_auth_type, user_name) + return CryptBitcoin.verify(cert_subject, issuer_address, sign) def verifyCert(self, inner_path, content): - from Crypt import CryptBitcoin - rules = self.getRules(inner_path, content) if not rules: @@ -792,22 +850,17 @@ class ContentManager(object): else: raise VerifyError("Invalid cert signer: %s" % domain) - try: - cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name) - result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"]) - except Exception, err: - raise VerifyError("Certificate verify error: %s" % err) - return result + return self.verifyCertSign(rules["user_address"], content["cert_auth_type"], name, cert_address, content["cert_sign"]) # Checks if the content.json content is valid # Return: True or False def verifyContent(self, inner_path, content): - 
content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values() if file["size"] >= 0]) # Size of new content + content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0]) # Size of new content # Calculate old content size old_content = self.contents.get(inner_path) if old_content: - old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in old_content.get("files", {}).values()]) - old_content_size_optional = sum([file["size"] for file in old_content.get("files_optional", {}).values()]) + old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())]) + old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())]) else: old_content_size = 0 old_content_size_optional = 0 @@ -816,7 +869,7 @@ class ContentManager(object): if not old_content and inner_path == "content.json": self.site.settings["size"] = 0 - content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values() if file["size"] >= 0]) + content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0]) site_size = self.site.settings["size"] - old_content_size + content_size # Site size without old content plus the new site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional # Site size without old content plus the new @@ -830,18 +883,19 @@ class ContentManager(object): if content.get("inner_path") and content["inner_path"] != inner_path: raise VerifyError("Wrong inner_path: %s" % content["inner_path"]) - # Check total site size limit - if site_size > site_size_limit: - if inner_path == "content.json" and self.site.settings["size"] == 0: - # First content.json download, save site size to display warning + # If our content.json file bigger than the size limit throw error + if inner_path == "content.json": + content_size_file = len(json.dumps(content, indent=1)) + if content_size_file > site_size_limit: + # Save site size to display warning self.site.settings["size"] = site_size - task = self.site.worker_manager.findTask(inner_path) - if task: # Dont try to download from other peers - self.site.worker_manager.failTask(task) - raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit)) + task = self.site.worker_manager.tasks.findTask(inner_path) + if task: # Dont try to download from other peers + self.site.worker_manager.failTask(task) + raise VerifyError("Content too large %s B > %s B, aborting task..." 
% (site_size, site_size_limit)) # Verify valid filenames - for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): if not self.isValidRelativePath(file_relative_path): raise VerifyError("Invalid relative path: %s" % file_relative_path) @@ -855,7 +909,7 @@ class ContentManager(object): self.site.settings["size_optional"] = site_size_optional return True else: - return False + raise VerifyError("Content verify error") def verifyContentInclude(self, inner_path, content, content_size, content_size_optional): # Load include details @@ -876,13 +930,13 @@ class ContentManager(object): # Filename limit if rules.get("files_allowed"): - for file_inner_path in content["files"].keys(): - if not SafeRe.match("^%s$" % rules["files_allowed"], file_inner_path): + for file_inner_path in list(content["files"].keys()): + if not SafeRe.match(r"^%s$" % rules["files_allowed"], file_inner_path): raise VerifyError("File not allowed: %s" % file_inner_path) if rules.get("files_allowed_optional"): - for file_inner_path in content.get("files_optional", {}).keys(): - if not SafeRe.match("^%s$" % rules["files_allowed_optional"], file_inner_path): + for file_inner_path in list(content.get("files_optional", {}).keys()): + if not SafeRe.match(r"^%s$" % rules["files_allowed_optional"], file_inner_path): raise VerifyError("Optional file not allowed: %s" % file_inner_path) # Check if content includes allowed @@ -900,7 +954,13 @@ class ContentManager(object): if type(file) is dict: new_content = file else: - new_content = json.load(file) + try: + if sys.version_info.major == 3 and sys.version_info.minor < 6: + new_content = json.loads(file.read().decode("utf8")) + else: + new_content = json.load(file) + except Exception as err: + raise VerifyError("Invalid json file: %s" % err) if inner_path in self.contents: old_content = self.contents.get(inner_path, {"modified": 0}) # Checks if its newer the ours @@ -948,23 +1008,22 @@ class ContentManager(object): if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid raise VerifyError("Invalid cert!") - valid_signs = 0 + valid_signs = [] for address in valid_signers: if address in signs: - valid_signs += CryptBitcoin.verify(sign_content, address, signs[address]) - if valid_signs >= signs_required: + result = CryptBitcoin.verify(sign_content, address, signs[address]) + if result: + valid_signs.append(address) + if len(valid_signs) >= signs_required: break # Break if we has enough signs - if valid_signs < signs_required: - raise VerifyError("Valid signs: %s/%s" % (valid_signs, signs_required)) + if len(valid_signs) < signs_required: + raise VerifyError("Valid signs: %s/%s, Valid Signers : %s" % (len(valid_signs), signs_required, valid_signs)) else: return self.verifyContent(inner_path, new_content) else: # Old style signing - if CryptBitcoin.verify(sign_content, self.site.address, sign): - return self.verifyContent(inner_path, new_content) - else: - raise VerifyError("Invalid old-style sign") + raise VerifyError("Invalid old-style sign") - except Exception, err: + except Exception as err: self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err))) raise err @@ -1003,3 +1062,6 @@ class ContentManager(object): self.site.settings["optional_downloaded"] -= size return done + + def optionalRenamed(self, inner_path_old, inner_path_new): + return True diff 
--git a/src/Content/__init__.py b/src/Content/__init__.py index fab39f93..fbbd39f4 100644 --- a/src/Content/__init__.py +++ b/src/Content/__init__.py @@ -1 +1 @@ -from ContentManager import ContentManager \ No newline at end of file +from .ContentManager import ContentManager \ No newline at end of file diff --git a/src/Crypt/Crypt.py b/src/Crypt/Crypt.py new file mode 100644 index 00000000..7d7d3659 --- /dev/null +++ b/src/Crypt/Crypt.py @@ -0,0 +1,4 @@ +from Config import config +from util import ThreadPool + +thread_pool_crypt = ThreadPool.ThreadPool(config.threads_crypt) \ No newline at end of file diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py index 91ddb10a..68b2caa2 100644 --- a/src/Crypt/CryptBitcoin.py +++ b/src/Crypt/CryptBitcoin.py @@ -1,78 +1,101 @@ import logging +import base64 +import binascii +import time +import hashlib -from lib.BitcoinECC import BitcoinECC -from lib.pybitcointools import bitcoin as btctools +from util.Electrum import dbl_format from Config import config -# Try to load openssl +import util.OpensslFindPatch + +lib_verify_best = "sslcrypto" + +from lib import sslcrypto +sslcurve_native = sslcrypto.ecc.get_curve("secp256k1") +sslcurve_fallback = sslcrypto.fallback.ecc.get_curve("secp256k1") +sslcurve = sslcurve_native + +def loadLib(lib_name, silent=False): + global sslcurve, libsecp256k1message, lib_verify_best + if lib_name == "libsecp256k1": + s = time.time() + from lib import libsecp256k1message + import coincurve + lib_verify_best = "libsecp256k1" + if not silent: + logging.info( + "Libsecpk256k1 loaded: %s in %.3fs" % + (type(coincurve._libsecp256k1.lib).__name__, time.time() - s) + ) + elif lib_name == "sslcrypto": + sslcurve = sslcurve_native + if sslcurve_native == sslcurve_fallback: + logging.warning("SSLCurve fallback loaded instead of native") + elif lib_name == "sslcrypto_fallback": + sslcurve = sslcurve_fallback + try: - if not config.use_openssl: + if not config.use_libsecp256k1: raise Exception("Disabled by config") - from lib.opensslVerify import opensslVerify - logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version) -except Exception, err: - logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err) - opensslVerify = None + loadLib("libsecp256k1") + lib_verify_best = "libsecp256k1" +except Exception as err: + logging.info("Libsecp256k1 load failed: %s" % err) -def newPrivatekey(uncompressed=True): # Return new private key - privatekey = btctools.encode_privkey(btctools.random_key(), "wif") - return privatekey +def newPrivatekey(): # Return new private key + return sslcurve.private_to_wif(sslcurve.new_private_key()).decode() def newSeed(): - return btctools.random_key() + return binascii.hexlify(sslcurve.new_private_key()).decode() def hdPrivatekey(seed, child): - masterkey = btctools.bip32_master_key(seed) - childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems - key = btctools.bip32_extract_key(childkey) - return btctools.encode_privkey(key, "wif") + # Too large child id could cause problems + privatekey_bin = sslcurve.derive_child(seed.encode(), child % 100000000) + return sslcurve.private_to_wif(privatekey_bin).decode() def privatekeyToAddress(privatekey): # Return address from private key - if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib - bitcoin = BitcoinECC.Bitcoin() - bitcoin.BitcoinAddressFromPrivate(privatekey) - return bitcoin.BitcoinAddresFromPublicKey() - else: - try: - 
return btctools.privkey_to_address(privatekey) - except Exception: # Invalid privatekey - return False + try: + if len(privatekey) == 64: + privatekey_bin = bytes.fromhex(privatekey) + else: + privatekey_bin = sslcurve.wif_to_private(privatekey.encode()) + return sslcurve.private_to_address(privatekey_bin).decode() + except Exception: # Invalid privatekey + return False def sign(data, privatekey): # Return sign to data using private key if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported - sign = btctools.ecdsa_sign(data, privatekey) - return sign + return base64.b64encode(sslcurve.sign( + data.encode(), + sslcurve.wif_to_private(privatekey.encode()), + recoverable=True, + hash=dbl_format + )).decode() -def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style) - bitcoin = BitcoinECC.Bitcoin() - bitcoin.BitcoinAddressFromPrivate(privatekey) - sign = bitcoin.SignECDSA(data) - return sign +def verify(data, valid_address, sign, lib_verify=None): # Verify data using address and sign + if not lib_verify: + lib_verify = lib_verify_best - -def verify(data, address, sign): # Verify data using address and sign if not sign: return False - if hasattr(sign, "endswith"): - if opensslVerify: # Use the faster method if avalible - pub = opensslVerify.getMessagePubkey(data, sign) - sign_address = btctools.pubtoaddr(pub) - else: # Use pure-python - pub = btctools.ecdsa_recover(data, sign) - sign_address = btctools.pubtoaddr(pub) + if lib_verify == "libsecp256k1": + sign_address = libsecp256k1message.recover_address(data.encode("utf8"), sign).decode("utf8") + elif lib_verify in ("sslcrypto", "sslcrypto_fallback"): + publickey = sslcurve.recover(base64.b64decode(sign), data.encode(), hash=dbl_format) + sign_address = sslcurve.public_to_address(publickey).decode() + else: + raise Exception("No library enabled for signature verification") - if type(address) is list: # Any address in the list - return sign_address in address - else: # One possible address - return sign_address == address - else: # Backward compatible old style - bitcoin = BitcoinECC.Bitcoin() - return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign) + if type(valid_address) is list: # Any address in the list + return sign_address in valid_address + else: # One possible address + return sign_address == valid_address diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py index 0897d2af..c0903e84 100644 --- a/src/Crypt/CryptConnection.py +++ b/src/Crypt/CryptConnection.py @@ -6,18 +6,30 @@ import hashlib import random from Config import config -from util import SslPatch from util import helper class CryptConnectionManager: def __init__(self): - # OpenSSL params - if sys.platform.startswith("win"): - self.openssl_bin = "src\\lib\\opensslVerify\\openssl.exe" + if config.openssl_bin_file: + self.openssl_bin = config.openssl_bin_file + elif sys.platform.startswith("win"): + self.openssl_bin = "tools\\openssl\\openssl.exe" + elif config.dist_type.startswith("bundle_linux"): + self.openssl_bin = "../runtime/bin/openssl" else: self.openssl_bin = "openssl" - self.openssl_env = {"OPENSSL_CONF": "src/lib/opensslVerify/openssl.cnf"} + + self.context_client = None + self.context_server = None + + self.openssl_conf_template = "src/lib/openssl/openssl.cnf" + self.openssl_conf = config.data_dir + "/openssl.cnf" + + self.openssl_env = { + "OPENSSL_CONF": self.openssl_conf, + "RANDFILE": config.data_dir + "/openssl-rand.tmp" + } 
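
# --- Editor's sketch, not part of the patch: how the sslcrypto-based sign/verify
# flow that CryptBitcoin.py switches to above fits together end to end.
# Assumptions: the repository's bundled lib.sslcrypto is importable; the inline
# dbl_sha256 helper merely stands in for util.Electrum.dbl_format used by the
# real code, and sign_message/verify_message are illustrative names only.
import base64
import hashlib

from lib import sslcrypto

curve = sslcrypto.ecc.get_curve("secp256k1")

def dbl_sha256(data):
    # Stand-in message hash; the patch passes util.Electrum.dbl_format here
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def sign_message(message, privatekey_wif):
    # Recoverable signature, so verification can derive the signer address back
    private_bin = curve.wif_to_private(privatekey_wif.encode())
    sign_bin = curve.sign(message.encode(), private_bin, recoverable=True, hash=dbl_sha256)
    return base64.b64encode(sign_bin).decode()

def verify_message(message, valid_address, sign_b64):
    publickey = curve.recover(base64.b64decode(sign_b64), message.encode(), hash=dbl_sha256)
    return curve.public_to_address(publickey).decode() == valid_address

if __name__ == "__main__":
    priv = curve.private_to_wif(curve.new_private_key()).decode()
    addr = curve.private_to_address(curve.wif_to_private(priv.encode())).decode()
    sig = sign_message("hello", priv)
    assert verify_message("hello", addr, sig)
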
self.crypt_supported = [] # Supported cryptos @@ -27,6 +39,44 @@ class CryptConnectionManager: self.cert_csr = config.data_dir + "/cert-rsa.csr" self.key_pem = config.data_dir + "/key-rsa.pem" + self.log = logging.getLogger("CryptConnectionManager") + self.log.debug("Version: %s" % ssl.OPENSSL_VERSION) + + self.fakedomains = [ + "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com", + "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com", + "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp", + "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com", + "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com", + "nazwa.pl", "symantec.com" + ] + + def createSslContexts(self): + if self.context_server and self.context_client: + return False + ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:" + ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" + + if hasattr(ssl, "PROTOCOL_TLS"): + protocol = ssl.PROTOCOL_TLS + else: + protocol = ssl.PROTOCOL_TLSv1_2 + self.context_client = ssl.SSLContext(protocol) + self.context_client.check_hostname = False + self.context_client.verify_mode = ssl.CERT_NONE + + self.context_server = ssl.SSLContext(protocol) + self.context_server.load_cert_chain(self.cert_pem, self.key_pem) + + for ctx in (self.context_client, self.context_server): + ctx.set_ciphers(ciphers) + ctx.options |= ssl.OP_NO_COMPRESSION + try: + ctx.set_alpn_protocols(["h2", "http/1.1"]) + ctx.set_npn_protocols(["h2", "http/1.1"]) + except Exception: + pass + # Select crypt that supported by both sides # Return: Name of the crypto def selectCrypt(self, client_supported): @@ -39,18 +89,14 @@ class CryptConnectionManager: # Return: wrapped socket def wrapSocket(self, sock, crypt, server=False, cert_pin=None): if crypt == "tls-rsa": - ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:" - ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" if server: - sock_wrapped = ssl.wrap_socket( - sock, server_side=server, keyfile=self.key_pem, - certfile=self.cert_pem, ciphers=ciphers - ) + sock_wrapped = self.context_server.wrap_socket(sock, server_side=True) else: - sock_wrapped = ssl.wrap_socket(sock, ciphers=ciphers) + sock_wrapped = self.context_client.wrap_socket(sock, server_hostname=random.choice(self.fakedomains)) if cert_pin: cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest() - assert cert_hash == cert_pin, "Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin) + if cert_hash != cert_pin: + raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin)) return sock_wrapped else: return sock @@ -58,7 +104,7 @@ class CryptConnectionManager: def removeCerts(self): if config.keep_ssl_cert: return False - for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr"]: + for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]: file_path = "%s/%s" % (config.data_dir, file_name) if os.path.isfile(file_path): os.unlink(file_path) @@ -80,78 +126,96 @@ class CryptConnectionManager: "/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA", "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA 
Domain Validation Secure Server CA" ] - fakedomains = [ - "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com", - "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com", - "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp", - "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com", - "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com", - "nazwa.pl", "symantec.com" - ] - self.openssl_env['CN'] = random.choice(fakedomains) + self.openssl_env['CN'] = random.choice(self.fakedomains) + environ = os.environ + environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF'] + environ['RANDFILE'] = self.openssl_env['RANDFILE'] + environ['CN'] = self.openssl_env['CN'] if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): + self.createSslContexts() return True # Files already exits import subprocess + + # Replace variables in config template + conf_template = open(self.openssl_conf_template).read() + conf_template = conf_template.replace("$ENV::CN", self.openssl_env['CN']) + open(self.openssl_conf, "w").write(conf_template) + # Generate CAcert and CAkey - cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj %s -keyout %s -out %s -batch -config %s" % helper.shellquote( + cmd_params = helper.shellquote( self.openssl_bin, + self.openssl_conf, random.choice(casubjects), self.cakey_pem, - self.cacert_pem, - self.openssl_env["OPENSSL_CONF"], + self.cacert_pem ) + cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -config %s -subj %s -keyout %s -out %s -batch" % cmd_params + self.log.debug("Generating RSA CAcert and CAkey PEM files...") + self.log.debug("Running: %s" % cmd) proc = subprocess.Popen( - cmd.encode(sys.getfilesystemencoding()), - shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env + cmd, shell=True, stderr=subprocess.STDOUT, + stdout=subprocess.PIPE, env=environ ) - back = proc.stdout.read().strip() + back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") proc.wait() - logging.debug("Generating RSA CAcert and CAkey PEM files...%s" % back) if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)): - logging.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist.") + self.log.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist. 
(%s)" % back) return False + else: + self.log.debug("Result: %s" % back) # Generate certificate key and signing request - cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % helper.shellquote( + cmd_params = helper.shellquote( self.openssl_bin, self.key_pem, self.cert_csr, "/CN=" + self.openssl_env['CN'], - self.openssl_env["OPENSSL_CONF"], + self.openssl_conf, ) + cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % cmd_params + self.log.debug("Generating certificate key and signing request...") proc = subprocess.Popen( - cmd.encode(sys.getfilesystemencoding()), - shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env + cmd, shell=True, stderr=subprocess.STDOUT, + stdout=subprocess.PIPE, env=environ ) - back = proc.stdout.read().strip() + back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") proc.wait() - logging.debug("Generating certificate key and signing request...%s" % back) + self.log.debug("Running: %s\n%s" % (cmd, back)) # Sign request and generate certificate - cmd = "%s x509 -req -in %s -CA %s -CAkey %s -CAcreateserial -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % helper.shellquote( + cmd_params = helper.shellquote( self.openssl_bin, self.cert_csr, self.cacert_pem, self.cakey_pem, self.cert_pem, - self.openssl_env["OPENSSL_CONF"], + self.openssl_conf ) + cmd = "%s x509 -req -in %s -CA %s -CAkey %s -set_serial 01 -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % cmd_params + self.log.debug("Generating RSA cert...") proc = subprocess.Popen( - cmd.encode(sys.getfilesystemencoding()), - shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env + cmd, shell=True, stderr=subprocess.STDOUT, + stdout=subprocess.PIPE, env=environ ) - back = proc.stdout.read().strip() + back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") proc.wait() - logging.debug("Generating RSA cert...%s" % back) + self.log.debug("Running: %s\n%s" % (cmd, back)) if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): + self.createSslContexts() + + # Remove no longer necessary files + os.unlink(self.openssl_conf) + os.unlink(self.cacert_pem) + os.unlink(self.cakey_pem) + os.unlink(self.cert_csr) + return True else: - logging.error("RSA ECC SSL cert generation failed, cert or key files not exist.") - return False + self.log.error("RSA ECC SSL cert generation failed, cert or key files not exist.") manager = CryptConnectionManager() diff --git a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py index 118053b6..f5901fb8 100644 --- a/src/Crypt/CryptHash.py +++ b/src/Crypt/CryptHash.py @@ -3,20 +3,11 @@ import os import base64 -def sha1sum(file, blocksize=65536): - if hasattr(file, "endswith"): # Its a string open it - file = open(file, "rb") - hash = hashlib.sha1() - for block in iter(lambda: file.read(blocksize), ""): - hash.update(block) - return hash.hexdigest() - - def sha512sum(file, blocksize=65536, format="hexdigest"): - if hasattr(file, "endswith"): # Its a string open it + if type(file) is str: # Filename specified file = open(file, "rb") hash = hashlib.sha512() - for block in iter(lambda: file.read(blocksize), ""): + for block in iter(lambda: file.read(blocksize), b""): hash.update(block) # Truncate to 256bits is good enough @@ -26,12 +17,11 @@ def sha512sum(file, blocksize=65536, format="hexdigest"): return hash.digest()[0:32] - def sha256sum(file, blocksize=65536): - if hasattr(file, 
"endswith"): # Its a string open it + if type(file) is str: # Filename specified file = open(file, "rb") hash = hashlib.sha256() - for block in iter(lambda: file.read(blocksize), ""): + for block in iter(lambda: file.read(blocksize), b""): hash.update(block) return hash.hexdigest() @@ -39,7 +29,7 @@ def sha256sum(file, blocksize=65536): def random(length=64, encoding="hex"): if encoding == "base64": # Characters: A-Za-z0-9 hash = hashlib.sha512(os.urandom(256)).digest() - return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length] + return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length] else: # Characters: a-f0-9 (faster) return hashlib.sha512(os.urandom(256)).hexdigest()[0:length] diff --git a/src/Crypt/CryptRsa.py b/src/Crypt/CryptRsa.py deleted file mode 100644 index 694ef34f..00000000 --- a/src/Crypt/CryptRsa.py +++ /dev/null @@ -1,38 +0,0 @@ -import base64 -import hashlib - -def sign(data, privatekey): - from lib import rsa - from lib.rsa import pkcs1 - - if "BEGIN RSA PRIVATE KEY" not in privatekey: - privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey - - priv = rsa.PrivateKey.load_pkcs1(privatekey) - sign = rsa.pkcs1.sign(data, priv, 'SHA-256') - return sign - -def verify(data, publickey, sign): - from lib import rsa - from lib.rsa import pkcs1 - - pub = rsa.PublicKey.load_pkcs1(publickey, format="DER") - try: - valid = rsa.pkcs1.verify(data, sign, pub) - except pkcs1.VerificationError: - valid = False - return valid - -def privatekeyToPublickey(privatekey): - from lib import rsa - from lib.rsa import pkcs1 - - if "BEGIN RSA PRIVATE KEY" not in privatekey: - privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey - - priv = rsa.PrivateKey.load_pkcs1(privatekey) - pub = rsa.PublicKey(priv.n, priv.e) - return pub.save_pkcs1("DER") - -def publickeyToOnion(publickey): - return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower() diff --git a/src/Crypt/CryptTor.py b/src/Crypt/CryptTor.py new file mode 100644 index 00000000..78ba6fc2 --- /dev/null +++ b/src/Crypt/CryptTor.py @@ -0,0 +1,85 @@ +import base64 +import hashlib + +def sign(data, privatekey): + import rsa + from rsa import pkcs1 + from lib import Ed25519 + + ## Onion Service V3 + if len(privatekey) == 88: + prv_key = base64.b64decode(privatekey) + pub_key = Ed25519.publickey_unsafe(prv_key) + sign = Ed25519.signature_unsafe(data, prv_key, pub_key) + + return sign + + ## Onion Service V2 + if "BEGIN RSA PRIVATE KEY" not in privatekey: + privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey + + priv = rsa.PrivateKey.load_pkcs1(privatekey) + sign = rsa.pkcs1.sign(data, priv, 'SHA-256') + return sign + +def verify(data, publickey, sign): + import rsa + from rsa import pkcs1 + from lib import Ed25519 + + ## Onion Service V3 + if len(publickey) == 32: + + try: + valid = Ed25519.checkvalid(sign, data, publickey) + valid = 'SHA-256' + + except Exception as err: + print(err) + valid = False + + return valid + + ## Onion Service V2 + pub = rsa.PublicKey.load_pkcs1(publickey, format="DER") + + try: + valid = rsa.pkcs1.verify(data, sign, pub) + + except pkcs1.VerificationError: + valid = False + + return valid + +def privatekeyToPublickey(privatekey): + import rsa + from rsa import pkcs1 + from lib import Ed25519 + + ## Onion Service V3 + if len(privatekey) == 88: + prv_key = base64.b64decode(privatekey) + pub_key = 
Ed25519.publickey_unsafe(prv_key) + + return pub_key + + ## Onion Service V2 + if "BEGIN RSA PRIVATE KEY" not in privatekey: + privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey + + priv = rsa.PrivateKey.load_pkcs1(privatekey) + pub = rsa.PublicKey(priv.n, priv.e) + + return pub.save_pkcs1("DER") + +def publickeyToOnion(publickey): + from lib import Ed25519 + + ## Onion Service V3 + if len(publickey) == 32: + addr = Ed25519.publickey_to_onionaddress(publickey)[:-6] + + return addr + + ## Onion Service V2 + return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii") diff --git a/src/Db/Db.py b/src/Db/Db.py index 186d45fe..d1d9ce15 100644 --- a/src/Db/Db.py +++ b/src/Db/Db.py @@ -4,14 +4,24 @@ import time import logging import re import os +import atexit +import threading +import sys +import weakref +import errno + import gevent from Debug import Debug -from DbCursor import DbCursor -from Config import config +from .DbCursor import DbCursor from util import SafeRe from util import helper +from util import ThreadPool +from Config import config +thread_pool_db = ThreadPool.ThreadPool(config.threads_db) + +next_db_id = 0 opened_dbs = [] @@ -22,51 +32,116 @@ def dbCleanup(): for db in opened_dbs[:]: idle = time.time() - db.last_query_time if idle > 60 * 5 and db.close_idle: - db.close() + db.close("Cleanup") + + +def dbCommitCheck(): + while 1: + time.sleep(5) + for db in opened_dbs[:]: + if not db.need_commit: + continue + + success = db.commit("Interval") + if success: + db.need_commit = False + time.sleep(0.1) + + +def dbCloseAll(): + for db in opened_dbs[:]: + db.close("Close all") + gevent.spawn(dbCleanup) +gevent.spawn(dbCommitCheck) +atexit.register(dbCloseAll) + + +class DbTableError(Exception): + def __init__(self, message, table): + super().__init__(message) + self.table = table class Db(object): def __init__(self, schema, db_path, close_idle=False): + global next_db_id self.db_path = db_path self.db_dir = os.path.dirname(db_path) + "/" self.schema = schema self.schema["version"] = self.schema.get("version", 1) self.conn = None self.cur = None - self.log = logging.getLogger("Db:%s" % schema["db_name"]) + self.cursors = weakref.WeakSet() + self.id = next_db_id + next_db_id += 1 + self.progress_sleeping = False + self.commiting = False + self.log = logging.getLogger("Db#%s:%s" % (self.id, schema["db_name"])) self.table_names = None self.collect_stats = False self.foreign_keys = False + self.need_commit = False self.query_stats = {} self.db_keyvalues = {} self.delayed_queue = [] self.delayed_queue_thread = None self.close_idle = close_idle self.last_query_time = time.time() + self.last_sleep_time = time.time() + self.num_execute_since_sleep = 0 + self.lock = ThreadPool.Lock() + self.connect_lock = ThreadPool.Lock() def __repr__(self): return "" % (id(self), self.db_path, self.close_idle) def connect(self): - if self not in opened_dbs: - opened_dbs.append(self) - s = time.time() - if not os.path.isdir(self.db_dir): # Directory not exist yet - os.makedirs(self.db_dir) - self.log.debug("Created Db path: %s" % self.db_dir) - if not os.path.isfile(self.db_path): - self.log.debug("Db file not exist yet: %s" % self.db_path) - self.conn = sqlite3.connect(self.db_path, check_same_thread=False) - self.conn.row_factory = sqlite3.Row - self.conn.isolation_level = None - self.cur = self.getCursor() - self.log.debug( - "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." 
% - (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) - ) + self.connect_lock.acquire(True) + try: + if self.conn: + self.log.debug("Already connected, connection ignored") + return + + if self not in opened_dbs: + opened_dbs.append(self) + s = time.time() + try: # Directory not exist yet + os.makedirs(self.db_dir) + self.log.debug("Created Db path: %s" % self.db_dir) + except OSError as err: + if err.errno != errno.EEXIST: + raise err + if not os.path.isfile(self.db_path): + self.log.debug("Db file not exist yet: %s" % self.db_path) + self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED", check_same_thread=False) + self.conn.row_factory = sqlite3.Row + self.conn.set_progress_handler(self.progress, 5000000) + self.conn.execute('PRAGMA journal_mode=WAL') + if self.foreign_keys: + self.conn.execute("PRAGMA foreign_keys = ON") + self.cur = self.getCursor() + + self.log.debug( + "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." % + (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) + ) + self.log.debug("Connect by thread: %s" % threading.current_thread().ident) + self.log.debug("Connect called by %s" % Debug.formatStack()) + finally: + self.connect_lock.release() + + def getConn(self): + if not self.conn: + self.connect() + return self.conn + + def progress(self, *args, **kwargs): + self.progress_sleeping = True + time.sleep(0.001) + self.progress_sleeping = False # Execute query using dbcursor def execute(self, query, params=None): @@ -74,6 +149,35 @@ class Db(object): self.connect() return self.cur.execute(query, params) + @thread_pool_db.wrap + def commit(self, reason="Unknown"): + if self.progress_sleeping: + self.log.debug("Commit ignored: Progress sleeping") + return False + + if not self.conn: + self.log.debug("Commit ignored: No connection") + return False + + if self.commiting: + self.log.debug("Commit ignored: Already commiting") + return False + + try: + s = time.time() + self.commiting = True + self.conn.commit() + self.log.debug("Commited in %.3fs (reason: %s)" % (time.time() - s, reason)) + return True + except Exception as err: + if "SQL statements in progress" in str(err): + self.log.warning("Commit delayed: %s (reason: %s)" % (Debug.formatException(err), reason)) + else: + self.log.error("Commit error: %s (reason: %s)" % (Debug.formatException(err), reason)) + return False + finally: + self.commiting = False + def insertOrUpdate(self, *args, **kwargs): if not self.conn: self.connect() @@ -98,32 +202,47 @@ class Db(object): s = time.time() cur = self.getCursor() - cur.execute("BEGIN") for command, params in self.delayed_queue: if command == "insertOrUpdate": cur.insertOrUpdate(*params[0], **params[1]) else: cur.execute(*params[0], **params[1]) - cur.execute("END") if len(self.delayed_queue) > 10: self.log.debug("Processed %s delayed queue in %.3fs" % (len(self.delayed_queue), time.time() - s)) self.delayed_queue = [] self.delayed_queue_thread = None - def close(self): + def close(self, reason="Unknown"): + if not self.conn: + return False + self.connect_lock.acquire() s = time.time() if self.delayed_queue: self.processDelayed() if self in opened_dbs: opened_dbs.remove(self) + self.need_commit = False + self.commit("Closing: %s" % reason) + self.log.debug("Close called by %s" % Debug.formatStack()) + for i in range(5): + if len(self.cursors) == 0: + break + self.log.debug("Pending cursors: %s" % len(self.cursors)) + time.sleep(0.1 * i) + if len(self.cursors): + self.log.debug("Killing cursors: %s" % len(self.cursors)) + 
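
# --- Editor's sketch, not part of the patch: the SQLite connection pattern the
# Db.py changes above move to (WAL journal mode, deferred transactions, commits
# batched on an interval instead of per write). Assumptions: plain sqlite3 plus
# threading instead of the patch's gevent/ThreadPool machinery and locks;
# open_db, IntervalCommitter and "example.db" are illustrative names only.
import sqlite3
import threading
import time

def open_db(path):
    conn = sqlite3.connect(path, isolation_level="DEFERRED", check_same_thread=False)
    conn.row_factory = sqlite3.Row
    conn.execute("PRAGMA journal_mode=WAL")  # readers and the writer no longer block each other
    return conn

class IntervalCommitter(threading.Thread):
    # Rough stand-in for the dbCommitCheck() loop: flush pending writes every few seconds.
    # The real code additionally guards the connection with locks and a thread pool.
    def __init__(self, conn, interval=5):
        super().__init__(daemon=True)
        self.conn = conn
        self.interval = interval
        self.need_commit = False

    def run(self):
        while True:
            time.sleep(self.interval)
            if self.need_commit:
                self.conn.commit()
                self.need_commit = False

if __name__ == "__main__":
    db = open_db("example.db")  # hypothetical path
    db.execute("CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v TEXT)")
    committer = IntervalCommitter(db)
    committer.start()
    db.execute("INSERT OR REPLACE INTO kv VALUES (?, ?)", ("key", "value"))
    committer.need_commit = True  # picked up and committed on the next interval
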
self.conn.interrupt() + if self.cur: self.cur.close() if self.conn: - self.conn.close() + ThreadPool.main_loop.call(self.conn.close) self.conn = None self.cur = None - self.log.debug("%s closed in %.3fs, opened: %s" % (self.db_path, time.time() - s, len(opened_dbs))) + self.log.debug("%s closed (reason: %s) in %.3fs, opened: %s" % (self.db_path, reason, time.time() - s, len(opened_dbs))) + self.connect_lock.release() + return True # Gets a cursor object to database # Return: Cursor class @@ -131,26 +250,22 @@ class Db(object): if not self.conn: self.connect() - cur = DbCursor(self.conn, self) - if config.db_mode == "security": - cur.execute("PRAGMA journal_mode = WAL") - cur.execute("PRAGMA synchronous = NORMAL") - else: - cur.execute("PRAGMA journal_mode = MEMORY") - cur.execute("PRAGMA synchronous = OFF") - if self.foreign_keys: - cur.execute("PRAGMA foreign_keys = ON") - + cur = DbCursor(self) return cur + def getSharedCursor(self): + if not self.conn: + self.connect() + return self.cur + # Get the table version # Return: Table version or None if not exist def getTableVersion(self, table_name): if not self.db_keyvalues: # Get db keyvalues try: res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues - except sqlite3.OperationalError, err: # Table not exist - self.log.debug("Query error: %s" % err) + except sqlite3.OperationalError as err: # Table not exist + self.log.debug("Query table version error: %s" % err) return False for row in res: @@ -163,9 +278,8 @@ class Db(object): def checkTables(self): s = time.time() changed_tables = [] - cur = self.getCursor() - cur.execute("BEGIN") + cur = self.getSharedCursor() # Check internal tables # Check keyvalue table @@ -212,16 +326,18 @@ class Db(object): # Check schema tables for table_name, table_settings in self.schema.get("tables", {}).items(): try: + indexes = table_settings.get("indexes", []) + version = table_settings.get("schema_changed", 0) changed = cur.needTable( table_name, table_settings["cols"], - table_settings.get("indexes", []), version=table_settings.get("schema_changed", 0) + indexes, version=version ) if changed: changed_tables.append(table_name) except Exception as err: self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err))) + raise DbTableError(err, table_name) - cur.execute("COMMIT") self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables)) if changed_tables: self.db_keyvalues = {} # Refresh table version cache @@ -257,24 +373,23 @@ class Db(object): data = {} else: if file_path.endswith("json.gz"): - data = json.load(helper.limitedGzipFile(fileobj=file)) + file = helper.limitedGzipFile(fileobj=file) + + if sys.version_info.major == 3 and sys.version_info.minor < 6: + data = json.loads(file.read().decode("utf8")) else: data = json.load(file) - except Exception, err: + except Exception as err: self.log.debug("Json file %s load error: %s" % (file_path, err)) data = {} # No cursor specificed if not cur: - cur = self.getCursor() - cur.execute("BEGIN") + cur = self.getSharedCursor() cur.logging = False - commit_after_done = True - else: - commit_after_done = False # Row for current json file if required - if not data or filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps): + if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]: json_row = cur.getJsonRow(relative_path) # Check matched mappings in schema @@ -311,7 +426,7 @@ class Db(object): 
changed = True if changed: # Add the custom col values - data_json_row.update({key: val for key, val in data.iteritems() if key in dbmap["to_json_table"]}) + data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]}) cur.execute("INSERT OR REPLACE INTO json ?", data_json_row) # Insert data to tables @@ -333,7 +448,7 @@ class Db(object): # Fill import cols from table cols if not import_cols: - import_cols = set(map(lambda item: item[0], self.schema["tables"][table_name]["cols"])) + import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]]) cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],)) @@ -341,7 +456,7 @@ class Db(object): continue if key_col: # Map as dict - for key, val in data[node].iteritems(): + for key, val in data[node].items(): if val_col: # Single value cur.execute( "INSERT OR REPLACE INTO %s ?" % table_name, @@ -355,9 +470,9 @@ class Db(object): row[key_col] = key # Replace in value if necessary if replaces: - for replace_key, replace in replaces.iteritems(): + for replace_key, replace in replaces.items(): if replace_key in row: - for replace_from, replace_to in replace.iteritems(): + for replace_from, replace_to in replace.items(): row[replace_key] = row[replace_key].replace(replace_from, replace_to) row["json_id"] = json_row["json_id"] @@ -379,8 +494,6 @@ class Db(object): self.log.debug("Cleanup json row for %s" % file_path) cur.execute("DELETE FROM json WHERE json_id = %s" % json_row["json_id"]) - if commit_after_done: - cur.execute("COMMIT") return True @@ -394,7 +507,6 @@ if __name__ == "__main__": dbjson.collect_stats = True dbjson.checkTables() cur = dbjson.getCursor() - cur.execute("BEGIN") cur.logging = False dbjson.updateJson("data/users/content.json", cur=cur) for user_dir in os.listdir("data/users"): @@ -402,7 +514,6 @@ if __name__ == "__main__": dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur) # print ".", cur.logging = True - cur.execute("COMMIT") - print "Done in %.3fs" % (time.time() - s) + print("Done in %.3fs" % (time.time() - s)) for query, stats in sorted(dbjson.query_stats.items()): - print "-", query, stats + print("-", query, stats) diff --git a/src/Db/DbCursor.py b/src/Db/DbCursor.py index f397ff0c..acb8846d 100644 --- a/src/Db/DbCursor.py +++ b/src/Db/DbCursor.py @@ -2,22 +2,25 @@ import time import re from util import helper - # Special sqlite cursor class DbCursor: - def __init__(self, conn, db): - self.conn = conn + def __init__(self, db): self.db = db - self.cursor = conn.cursor() self.logging = False - def execute(self, query, params=None): - self.db.last_query_time = time.time() + def quoteValue(self, value): + if type(value) is int: + return str(value) + else: + return "'%s'" % value.replace("'", "''") + + def parseQuery(self, query, params): + query_type = query.split(" ", 1)[0].upper() if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params - if query.startswith("SELECT") or query.startswith("DELETE") or query.startswith("UPDATE"): + if query_type in ("SELECT", "DELETE", "UPDATE"): # Convert param dict to SELECT * FROM table WHERE key = ? AND key2 = ? 
format query_wheres = [] values = [] @@ -35,12 +38,15 @@ class DbCursor: else: query_values = ",".join(["?"] * len(value)) values += value - query_wheres.append("%s %s (%s)" % + query_wheres.append( + "%s %s (%s)" % (field, operator, query_values) ) else: if key.startswith("not__"): query_wheres.append(key.replace("not__", "") + " != ?") + elif key.endswith("__like"): + query_wheres.append(key.replace("__like", "") + " LIKE ?") elif key.endswith(">"): query_wheres.append(key.replace(">", "") + " > ?") elif key.endswith("<"): @@ -74,18 +80,40 @@ class DbCursor: new_params[key] = value params = new_params + return query, params + def execute(self, query, params=None): + query = query.strip() + while self.db.progress_sleeping or self.db.commiting: + time.sleep(0.1) - s = time.time() + self.db.last_query_time = time.time() - if params: # Query has parameters - res = self.cursor.execute(query, params) - if self.logging: - self.db.log.debug(query + " " + str(params) + " (Done in %.4f)" % (time.time() - s)) - else: - res = self.cursor.execute(query) - if self.logging: - self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s)) + query, params = self.parseQuery(query, params) + + cursor = self.db.getConn().cursor() + self.db.cursors.add(cursor) + if self.db.lock.locked(): + self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock)) + + try: + s = time.time() + self.db.lock.acquire(True) + if query.upper().strip("; ") == "VACUUM": + self.db.commit("vacuum called") + if params: + res = cursor.execute(query, params) + else: + res = cursor.execute(query) + finally: + self.db.lock.release() + + taken_query = time.time() - s + if self.logging or taken_query > 1: + if params: # Query has parameters + self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s)) + else: + self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s)) # Log query stats if self.db.collect_stats: @@ -94,7 +122,39 @@ class DbCursor: self.db.query_stats[query]["call"] += 1 self.db.query_stats[query]["time"] += time.time() - s - return res + query_type = query.split(" ", 1)[0].upper() + is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"] + if not self.db.need_commit and is_update_query: + self.db.need_commit = True + + if is_update_query: + return cursor + else: + return res + + def executemany(self, query, params): + while self.db.progress_sleeping or self.db.commiting: + time.sleep(0.1) + + self.db.last_query_time = time.time() + + s = time.time() + cursor = self.db.getConn().cursor() + self.db.cursors.add(cursor) + + try: + self.db.lock.acquire(True) + cursor.executemany(query, params) + finally: + self.db.lock.release() + + taken_query = time.time() - s + if self.logging or taken_query > 0.1: + self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query)) + + self.db.need_commit = True + + return cursor # Creates on updates a database row without incrementing the rowid def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}): @@ -103,11 +163,11 @@ class DbCursor: params = query_sets params.update(query_wheres) - self.cursor.execute( + res = self.execute( "UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)), params ) - if self.cursor.rowcount == 0: + if res.rowcount == 0: params.update(oninsert) # Add insert-only fields self.execute("INSERT INTO %s ?" 
% table, params) @@ -137,7 +197,7 @@ class DbCursor: def needTable(self, table, cols, indexes=None, version=1): current_version = self.db.getTableVersion(table) if int(current_version) < int(version): # Table need update or not extis - self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) + self.db.log.debug("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) self.createTable(table, cols) if indexes: self.createIndexes(table, indexes) @@ -183,4 +243,4 @@ class DbCursor: return row def close(self): - self.cursor.close() + pass diff --git a/src/Db/DbQuery.py b/src/Db/DbQuery.py index a7730d5b..3fb5ef73 100644 --- a/src/Db/DbQuery.py +++ b/src/Db/DbQuery.py @@ -9,9 +9,9 @@ class DbQuery: # Split main parts of query def parseParts(self, query): parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query) - parts = filter(None, parts) # Remove empty parts - parts = map(lambda s: s.strip(), parts) # Remove whitespace - return dict(zip(parts[0::2], parts[1::2])) + parts = [_f for _f in parts if _f] # Remove empty parts + parts = [s.strip() for s in parts] # Remove whitespace + return dict(list(zip(parts[0::2], parts[1::2]))) # Parse selected fields SELECT ... FROM def parseFields(self, query_select): diff --git a/src/Db/__init__.py b/src/Db/__init__.py index 5bede9f4..e69de29b 100644 --- a/src/Db/__init__.py +++ b/src/Db/__init__.py @@ -1,3 +0,0 @@ -from Db import Db -from DbQuery import DbQuery -from DbCursor import DbCursor \ No newline at end of file diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py index 960d260c..0ec42615 100644 --- a/src/Debug/Debug.py +++ b/src/Debug/Debug.py @@ -1,48 +1,140 @@ import sys import os +import re from Config import config # Non fatal exception class Notify(Exception): - def __init__(self, message): - self.message = message + def __init__(self, message=None): + if message: + self.message = message def __str__(self): return self.message +# Gevent greenlet.kill accept Exception type +def createNotifyType(message): + return type("Notify", (Notify, ), {"message": message}) + + +def formatExceptionMessage(err): + err_type = err.__class__.__name__ + if err.args: + err_message = err.args[-1] + else: + err_message = err.__str__() + return "%s: %s" % (err_type, err_message) + + +python_lib_dirs = [path.replace("\\", "/") for path in sys.path if re.sub(r".*[\\/]", "", path) in ("site-packages", "dist-packages")] +python_lib_dirs.append(os.path.dirname(os.__file__).replace("\\", "/")) # TODO: check if returns the correct path for PyPy + +root_dir = os.path.realpath(os.path.dirname(__file__) + "/../../") +root_dir = root_dir.replace("\\", "/") + + +def formatTraceback(items, limit=None, fold_builtin=True): + back = [] + i = 0 + prev_file_title = "" + is_prev_builtin = False + + for path, line in items: + i += 1 + is_last = i == len(items) + path = path.replace("\\", "/") + + if path.startswith("src/gevent/"): + file_title = "<gevent>/" + path[len("src/gevent/"):] + is_builtin = True + is_skippable_builtin = False + elif path in ("<frozen importlib._bootstrap>", "<frozen importlib._bootstrap_external>"): + file_title = "(importlib)" + is_builtin = True + is_skippable_builtin = True + else: + is_skippable_builtin = False + for base in python_lib_dirs: + if path.startswith(base + "/"): + file_title = path[len(base + "/"):] + module_name, *tail = file_title.split("/") + if module_name.endswith(".py"): + module_name = module_name[:-3] + file_title = "/".join(["<%s>" % module_name] + tail) + is_builtin = True + break + else: + is_builtin = False + 
for base in (root_dir + "/src", root_dir + "/plugins", root_dir): + if path.startswith(base + "/"): + file_title = path[len(base + "/"):] + break + else: + # For unknown paths, do our best to hide absolute path + file_title = path + for needle in ("/zeronet/", "/core/"): + if needle in file_title.lower(): + file_title = "?/" + file_title[file_title.lower().rindex(needle) + len(needle):] + + # Path compression: A/AB/ABC/X/Y.py -> ABC/X/Y.py + # E.g.: in 'Db/DbCursor.py' the directory part is unnecessary + if not file_title.startswith("/"): + prev_part = "" + for i, part in enumerate(file_title.split("/") + [""]): + if not part.startswith(prev_part): + break + prev_part = part + file_title = "/".join(file_title.split("/")[i - 1:]) + + if is_skippable_builtin and fold_builtin: + pass + elif is_builtin and is_prev_builtin and not is_last and fold_builtin: + if back[-1] != "...": + back.append("...") + else: + if file_title == prev_file_title: + back.append("%s" % line) + else: + back.append("%s line %s" % (file_title, line)) + + prev_file_title = file_title + is_prev_builtin = is_builtin + + if limit and i >= limit: + back.append("...") + break + return back + + def formatException(err=None, format="text"): import traceback if type(err) == Notify: return err - elif type(err) == tuple and err[0] is not None: # Passed trackeback info + elif type(err) == tuple and err and err[0] is not None: # Passed trackeback info exc_type, exc_obj, exc_tb = err err = None else: # No trackeback info passed, get latest exc_type, exc_obj, exc_tb = sys.exc_info() if not err: - err = exc_obj.message - tb = [] - for frame in traceback.extract_tb(exc_tb): - path, line, function, text = frame - file = os.path.split(path)[1] - tb.append("%s line %s" % (file, line)) + if hasattr(err, "message"): + err = exc_obj.message + else: + err = exc_obj + + tb = formatTraceback([[frame[0], frame[1]] for frame in traceback.extract_tb(exc_tb)]) if format == "html": - return "%s: %s
    %s" % (exc_type.__name__, err, " > ".join(tb)) + return "%s: %s
    %s" % (repr(err), err, " > ".join(tb)) else: return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb)) -def formatStack(): +def formatStack(limit=None): import inspect - back = [] - for stack in inspect.stack(): - frame, path, line, function, source, index = stack - file = os.path.split(path)[1] - back.append("%s line %s" % (file, line)) - return " > ".join(back) + tb = formatTraceback([[frame[1], frame[2]] for frame in inspect.stack()[1:]], limit=limit) + return " > ".join(tb) # Test if gevent eventloop blocks @@ -50,23 +142,31 @@ import logging import gevent import time + +num_block = 0 + + def testBlock(): + global num_block logging.debug("Gevent block checker started") last_time = time.time() while 1: time.sleep(1) if time.time() - last_time > 1.1: - logging.debug("Gevent block detected: %s" % (time.time() - last_time - 1)) + logging.debug("Gevent block detected: %.3fs" % (time.time() - last_time - 1)) + num_block += 1 last_time = time.time() + + gevent.spawn(testBlock) if __name__ == "__main__": try: - print 1 / 0 - except Exception, err: - print type(err).__name__ - print "1/0 error: %s" % formatException(err) + print(1 / 0) + except Exception as err: + print(type(err).__name__) + print("1/0 error: %s" % formatException(err)) def loadJson(): json.loads("Errr") @@ -74,13 +174,13 @@ if __name__ == "__main__": import json try: loadJson() - except Exception, err: - print err - print "Json load error: %s" % formatException(err) + except Exception as err: + print(err) + print("Json load error: %s" % formatException(err)) try: raise Notify("nothing...") - except Exception, err: - print "Notify: %s" % formatException(err) + except Exception as err: + print("Notify: %s" % formatException(err)) loadJson() diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py index 1e96125d..d100a3b8 100644 --- a/src/Debug/DebugHook.py +++ b/src/Debug/DebugHook.py @@ -1,24 +1,26 @@ import sys import logging import signal +import importlib import gevent import gevent.hub from Config import config +from . import Debug last_error = None def shutdown(reason="Unknown"): logging.info("Shutting down (reason: %s)..." 
% reason) - if "file_server" in dir(sys.modules["main"]) and sys.modules["main"].file_server.running: + import main + if "file_server" in dir(main): try: - if "file_server" in dir(sys.modules["main"]): - gevent.spawn(sys.modules["main"].file_server.stop) - if "ui_server" in dir(sys.modules["main"]): - gevent.spawn(sys.modules["main"].ui_server.stop) + gevent.spawn(main.file_server.stop) + if "ui_server" in dir(main): + gevent.spawn(main.ui_server.stop) except Exception as err: - print "Proper shutdown error: %s" % err + print("Proper shutdown error: %s" % err) sys.exit(0) else: sys.exit(0) @@ -48,7 +50,7 @@ def handleErrorNotify(*args, **kwargs): if err.__name__ == "KeyboardInterrupt": shutdown("Keyboard interrupt") elif err.__name__ != "Notify": - logging.error("Unhandled exception: %s" % [args]) + logging.error("Unhandled exception: %s" % Debug.formatException(args)) sys.__excepthook__(*args, **kwargs) @@ -69,17 +71,22 @@ else: sys.excepthook(exc_info[0], exc_info[1], exc_info[2]) gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet - reload(gevent) + importlib.reload(gevent) + +def handleGreenletError(context, type, value, tb): + if context.__class__ is tuple and context[0].__class__.__name__ == "ThreadPool": + # Exceptions in ThreadPool will be handled in the main Thread + return None -def handleGreenletError(self, context, type, value, tb): if isinstance(value, str): # Cython can raise errors where the value is a plain string # e.g., AttributeError, "_semaphore.Semaphore has no attr", value = type(value) - if not issubclass(type, self.NOT_ERROR): + + if not issubclass(type, gevent.get_hub().NOT_ERROR): sys.excepthook(type, value, tb) -gevent.hub.Hub.handle_error = handleGreenletError +gevent.get_hub().handle_error = handleGreenletError try: signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM")) @@ -91,18 +98,18 @@ if __name__ == "__main__": import time from gevent import monkey monkey.patch_all(thread=False, ssl=False) - import Debug + from . import Debug def sleeper(num): - print "started", num + print("started", num) time.sleep(3) raise Exception("Error") - print "stopped", num + print("stopped", num) thread1 = gevent.spawn(sleeper, 1) thread2 = gevent.spawn(sleeper, 2) time.sleep(1) - print "killing..." 
+ print("killing...") thread1.kill(exception=Debug.Notify("Worker stopped")) #thread2.throw(Debug.Notify("Throw")) - print "killed" + print("killed") gevent.joinall([thread1,thread2]) diff --git a/src/Debug/DebugLock.py b/src/Debug/DebugLock.py new file mode 100644 index 00000000..9cf22520 --- /dev/null +++ b/src/Debug/DebugLock.py @@ -0,0 +1,24 @@ +import time +import logging + +import gevent.lock + +from Debug import Debug + + +class DebugLock: + def __init__(self, log_after=0.01, name="Lock"): + self.name = name + self.log_after = log_after + self.lock = gevent.lock.Semaphore(1) + self.release = self.lock.release + + def acquire(self, *args, **kwargs): + s = time.time() + res = self.lock.acquire(*args, **kwargs) + time_taken = time.time() - s + if time_taken >= self.log_after: + logging.debug("%s: Waited %.3fs after called by %s" % + (self.name, time_taken, Debug.formatStack()) + ) + return res diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py index 4f576860..a892dc56 100644 --- a/src/Debug/DebugMedia.py +++ b/src/Debug/DebugMedia.py @@ -3,6 +3,7 @@ import subprocess import re import logging import time +import functools from Config import config from util import helper @@ -18,9 +19,9 @@ def findfiles(path, find_ext): elif f2 == "": return -1 else: - return cmp(f1.lower(), f2.lower()) + return helper.cmp(f1.lower(), f2.lower()) - for root, dirs, files in sorted(os.walk(path, topdown=False), cmp=sorter): + for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)): for file in sorted(files): file_path = root + "/" + file file_ext = file.split(".")[-1] @@ -44,6 +45,7 @@ def findCoffeescriptCompiler(): # Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features def merge(merged_path): + merged_path = merged_path.replace("\\", "/") merge_dir = os.path.dirname(merged_path) s = time.time() ext = merged_path.split(".")[-1] @@ -65,19 +67,21 @@ def merge(merged_path): if not changed: return # Assets not changed, nothing to do + old_parts = {} if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile - merged_old = open(merged_path, "rb").read().decode("utf8") - old_parts = {} - for match in re.findall(r"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): - old_parts[match[1]] = match[2].strip("\n\r") + merged_old = open(merged_path, "rb").read() + for match in re.findall(rb"(/\* ---- (.*?) 
---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): + old_parts[match[1].decode()] = match[2].strip(b"\n\r") + logging.debug("Merging %s (changed: %s, old parts: %s)" % (merged_path, changed, len(old_parts))) # Merge files parts = [] s_total = time.time() for file_path in findfiles(merge_dir, find_ext): - parts.append("\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "")) + file_relative_path = file_path.replace(merge_dir + "/", "") + parts.append(b"\n/* ---- %s ---- */\n\n" % file_relative_path.encode("utf8")) if file_path.endswith(".coffee"): # Compile coffee script - if file_path in changed or file_path.replace(config.data_dir, "") not in old_parts: # Only recompile if changed or its not compiled before + if file_path in changed or file_relative_path not in old_parts: # Only recompile if changed or its not compiled before if config.coffeescript_compiler is None: config.coffeescript_compiler = findCoffeescriptCompiler() if not config.coffeescript_compiler: @@ -88,38 +92,39 @@ def merge(merged_path): file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep)) if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file - command = config.coffeescript_compiler % file_path_escaped + command = config.coffeescript_compiler.replace("%s", file_path_escaped) else: # Put coffeescript file to end command = config.coffeescript_compiler + " " + file_path_escaped # Start compiling s = time.time() compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE) - out = compiler.stdout.read().decode("utf8") + out = compiler.stdout.read() compiler.wait() logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s)) # Check errors - if out and out.startswith("("): # No error found + if out and out.startswith(b"("): # No error found parts.append(out) else: # Put error message in place of source code error = out - logging.error("%s Compile error: %s" % (file_path, error)) + logging.error("%s Compile error: %s" % (file_relative_path, error)) + error_escaped = re.escape(error).replace(b"\n", b"\\n").replace(br"\\n", br"\n") parts.append( - "alert('%s compile error: %s');" % - (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n")) + b"alert('%s compile error: %s');" % + (file_relative_path.encode(), error_escaped) ) else: # Not changed use the old_part - parts.append(old_parts[file_path.replace(config.data_dir, "")]) + parts.append(old_parts[file_relative_path]) else: # Add to parts - parts.append(open(file_path).read().decode("utf8")) + parts.append(open(file_path, "rb").read()) - merged = u"\n".join(parts) + merged = b"\n".join(parts) if ext == "css": # Vendor prefix css from lib.cssvendor import cssvendor merged = cssvendor.prefix(merged) - merged = merged.replace("\r", "") - open(merged_path, "wb").write(merged.encode("utf8")) + merged = merged.replace(b"\r", b"") + open(merged_path, "wb").write(merged) logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total)) diff --git a/src/Debug/DebugReloader.py b/src/Debug/DebugReloader.py index 247f4432..482c7921 100644 --- a/src/Debug/DebugReloader.py +++ b/src/Debug/DebugReloader.py @@ -1,53 +1,69 @@ import logging import time -import threading +import os from Config import config -if config.debug: # Only load pyfilesytem if using debug mode +if config.debug and config.action == "main": try: - import fs.watch - import fs.osfs - pyfilesystem = fs.osfs.OSFS("src") - pyfilesystem_plugins = fs.osfs.OSFS("plugins") - 
logging.debug("Pyfilesystem detected, source code autoreload enabled") - except Exception, err: - pyfilesystem = False + import watchdog + import watchdog.observers + import watchdog.events + logging.debug("Watchdog fs listener detected, source code autoreload enabled") + enabled = True + except Exception as err: + logging.debug("Watchdog fs listener could not be loaded: %s" % err) + enabled = False else: - pyfilesystem = False + enabled = False class DebugReloader: - - def __init__(self, callback, directory="/"): + def __init__(self, paths=None): + if not paths: + paths = ["src", "plugins", config.data_dir + "/__plugins__"] + self.log = logging.getLogger("DebugReloader") self.last_chaged = 0 - if pyfilesystem: - self.directory = directory - self.callback = callback - if config.action == "main": - logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback)) - thread = threading.Thread(target=self.addWatcher) - thread.daemon = True - thread.start() + self.callbacks = [] + if enabled: + self.observer = watchdog.observers.Observer() + event_handler = watchdog.events.FileSystemEventHandler() + event_handler.on_modified = event_handler.on_deleted = self.onChanged + event_handler.on_created = event_handler.on_moved = self.onChanged + for path in paths: + if not os.path.isdir(path): + continue + self.log.debug("Adding autoreload: %s" % path) + self.observer.schedule(event_handler, path, recursive=True) + self.observer.start() - def addWatcher(self, recursive=True): - try: - time.sleep(1) # Wait for .pyc compiles - watch_events = [fs.watch.CREATED, fs.watch.MODIFIED] - pyfilesystem.add_watcher(self.changed, path=self.directory, events=watch_events, recursive=recursive) - pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=watch_events, recursive=recursive) - except Exception, err: - print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err + def addCallback(self, f): + self.callbacks.append(f) - def changed(self, evt): - if ( - not evt.path or "%s/" % config.data_dir in evt.path or - (not evt.path.endswith("py") and not evt.path.endswith("json")) or - "Test" in evt.path or - time.time() - self.last_chaged < 5.0 - ): - return False # Ignore *.pyc changes and no reload within 1 sec + def onChanged(self, evt): + path = evt.src_path + ext = path.rsplit(".", 1)[-1] + if ext not in ["py", "json"] or "Test" in path or time.time() - self.last_chaged < 1.0: + return False self.last_chaged = time.time() - logging.debug("File changed: %s, cb: %s reloading source code" % (evt.path, self.callback)) + if os.path.isfile(path): + time_modified = os.path.getmtime(path) + else: + time_modified = 0 + self.log.debug("File changed: %s reloading source code (modified %.3fs ago)" % (evt, time.time() - time_modified)) + if time.time() - time_modified > 5: # Probably it's just an attribute change, ignore it + return False + time.sleep(0.1) # Wait for lock release - self.callback() + for callback in self.callbacks: + try: + callback() + except Exception as err: + self.log.exception(err) + + def stop(self): + if enabled: + self.observer.stop() + self.log.debug("Stopped autoreload observer") + +watcher = DebugReloader() diff --git a/src/Debug/__init__.py b/src/Debug/__init__.py index 8632f92f..e69de29b 100644 --- a/src/Debug/__init__.py +++ b/src/Debug/__init__.py @@ -1 +0,0 @@ -from DebugReloader import DebugReloader \ No newline at end of file diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index b3a13f7f..c082c378 100644 --- 
a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -4,7 +4,6 @@ import time import json import collections import itertools -import socket # Third party modules import gevent @@ -12,7 +11,7 @@ import gevent from Debug import Debug from Config import config from util import RateLimit -from util import StreamingMsgpack +from util import Msgpack from util import helper from Plugin import PluginManager from contextlib import closing @@ -103,61 +102,82 @@ class FileRequest(object): # Update a site file request def actionUpdate(self, params): site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving + if not site or not site.isServing(): # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(1) self.connection.badAction(5) return False inner_path = params.get("inner_path", "") - if not inner_path.endswith("content.json"): self.response({"error": "Only content.json update allowed"}) self.connection.badAction(5) return - try: - content = json.loads(params["body"]) - except Exception, err: - self.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) - self.response({"error": "File invalid JSON"}) - self.connection.badAction(5) - return - - file_uri = "%s/%s:%s" % (site.address, inner_path, content["modified"]) - - if self.server.files_parsing.get(file_uri): # Check if we already working on it - valid = None # Same file - else: + current_content_modified = site.content_manager.contents.get(inner_path, {}).get("modified", 0) + should_validate_content = True + if "modified" in params and params["modified"] <= current_content_modified: + should_validate_content = False + valid = None # Same or earlier content as we have + + body = params["body"] + if not body: # No body sent, we have to download it first + site.log.debug("Missing body from update for file %s, downloading ..." % inner_path) + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer try: - valid = site.content_manager.verifyFile(inner_path, content) - except Exception, err: - self.log.debug("Update for %s is invalid: %s" % (inner_path, err)) - valid = False + body = peer.getFile(site.address, inner_path).read() + except Exception as err: + site.log.debug("Can't download updated file %s: %s" % (inner_path, err)) + self.response({"error": "Invalid File update: Failed to download updated file content"}) + self.connection.badAction(5) + return + + if should_validate_content: + try: + if type(body) is str: + body = body.encode() + # elif type(body) is list: + # content = json.loads(bytes(list).decode()) + content = json.loads(body.decode()) + except Exception as err: + site.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) + self.response({"error": "File invalid JSON"}) + self.connection.badAction(5) + return + + file_uri = "%s/%s:%s" % (site.address, inner_path, content["modified"]) + + if self.server.files_parsing.get(file_uri): # Check if we already working on it + valid = None # Same file + else: + try: + valid = site.content_manager.verifyFile(inner_path, content) + except Exception as err: + site.log.debug("Update for %s is invalid: %s" % (inner_path, err)) + error = err + valid = False if valid is True: # Valid and changed site.log.info("Update for %s looks valid, saving..." 
% inner_path) self.server.files_parsing[file_uri] = True - site.storage.write(inner_path, params["body"]) + site.storage.write(inner_path, body) del params["body"] site.onFileDone(inner_path) # Trigger filedone - if inner_path.endswith("content.json"): # Download every changed file from peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer - # On complete publish to other peers - diffs = params.get("diffs", {}) - site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=3), "publish_%s" % inner_path) + # Download every changed file from peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer + # On complete publish to other peers + diffs = params.get("diffs", {}) + site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=6), "publish_%s" % inner_path) - # Load new content file and download changed files in new thread - def downloader(): - site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {})) - del self.server.files_parsing[file_uri] - - gevent.spawn(downloader) - else: + # Load new content file and download changed files in new thread + def downloader(): + site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {})) del self.server.files_parsing[file_uri] + gevent.spawn(downloader) + self.response({"ok": "Thanks, file %s updated!" % inner_path}) self.connection.goodAction() @@ -169,7 +189,7 @@ class FileRequest(object): if inner_path in site.content_manager.contents: peer.last_content_json_update = site.content_manager.contents[inner_path]["modified"] if config.verbose: - self.log.debug( + site.log.debug( "Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) ) @@ -182,7 +202,7 @@ class FileRequest(object): self.connection.badAction() else: # Invalid sign or sha hash - self.response({"error": "File invalid: %s" % err}) + self.response({"error": "File %s invalid: %s" % (inner_path, error)}) self.connection.badAction(5) def isReadable(self, site, inner_path, file, pos): @@ -191,7 +211,7 @@ class FileRequest(object): # Send file content request def handleGetFile(self, params, streaming=False): site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving + if not site or not site.isServing(): # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(5) return False @@ -200,7 +220,7 @@ class FileRequest(object): if streaming: file_obj = site.storage.open(params["inner_path"]) else: - file_obj = StreamingMsgpack.FilePart(file_path, "rb") + file_obj = Msgpack.FilePart(file_path, "rb") with file_obj as file: file.seek(params["location"]) @@ -218,7 +238,6 @@ class FileRequest(object): if not streaming: file.read_bytes = read_bytes - if params["location"] > file_size: self.connection.badAction(5) raise RequestError("Bad file location") @@ -251,14 +270,18 @@ class FileRequest(object): return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]} - except RequestError, err: - self.log.debug("GetFile %s %s request error: %s" % (self.connection, params["inner_path"], Debug.formatException(err))) + except RequestError as err: + self.log.debug("GetFile %s %s %s request error: %s" % (self.connection, params["site"], params["inner_path"], Debug.formatException(err))) self.response({"error": "File read error: %s" % 
err}) - except Exception, err: + except OSError as err: if config.verbose: self.log.debug("GetFile read error: %s" % Debug.formatException(err)) self.response({"error": "File read error"}) return False + except Exception as err: + self.log.error("GetFile exception: %s" % Debug.formatException(err)) + self.response({"error": "File read exception"}) + return False def actionGetFile(self, params): return self.handleGetFile(params) @@ -269,7 +292,7 @@ class FileRequest(object): # Peer exchange request def actionPex(self, params): site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving + if not site or not site.isServing(): # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(5) return False @@ -306,7 +329,7 @@ class FileRequest(object): if config.verbose: self.log.debug( "Added %s peers to %s using pex, sending back %s" % - (added, site, {key: len(val) for key, val in packed_peers.iteritems()}) + (added, site, {key: len(val) for key, val in packed_peers.items()}) ) back = { @@ -320,7 +343,7 @@ class FileRequest(object): # Get modified content.json files since def actionListModified(self, params): site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving + if not site or not site.isServing(): # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(5) return False @@ -335,7 +358,7 @@ class FileRequest(object): def actionGetHashfield(self, params): site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving + if not site or not site.isServing(): # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(5) return False @@ -347,13 +370,13 @@ class FileRequest(object): peer.time_my_hashfield_sent = time.time() # Don't send again if not changed - self.response({"hashfield_raw": site.content_manager.hashfield.tostring()}) + self.response({"hashfield_raw": site.content_manager.hashfield.tobytes()}) def findHashIds(self, site, hash_ids, limit=100): back = collections.defaultdict(lambda: collections.defaultdict(list)) found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit) - for hash_id, peers in found.iteritems(): + for hash_id, peers in found.items(): for peer in peers: ip_type = helper.getIpType(peer.ip) if len(back[ip_type][hash_id]) < 20: @@ -363,7 +386,7 @@ class FileRequest(object): def actionFindHashIds(self, params): site = self.sites.get(params["site"]) s = time.time() - if not site or not site.settings["serving"]: # Site unknown or not serving + if not site or not site.isServing(): # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(5) return False @@ -385,13 +408,13 @@ class FileRequest(object): if config.verbose: self.log.debug( "Found: %s for %s hashids in %.3fs" % - ({key: len(val) for key, val in back.iteritems()}, len(params["hash_ids"]), time.time() - s) + ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s) ) self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes}) def actionSetHashfield(self, params): site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving + if not site or not site.isServing(): # Site unknown or not serving self.response({"error": "Unknown site"}) 
self.connection.badAction(5) return False @@ -400,12 +423,12 @@ class FileRequest(object): peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection, source="request") if not peer.connection: peer.connect(self.connection) - peer.hashfield.replaceFromString(params["hashfield_raw"]) + peer.hashfield.replaceFromBytes(params["hashfield_raw"]) self.response({"ok": "Updated"}) # Send a simple Pong! answer def actionPing(self, params): - self.response("Pong!") + self.response(b"Pong!") # Check requested port of the other peer def actionCheckport(self, params): diff --git a/src/File/FileServer.py b/src/File/FileServer.py index 0e167a6f..b7a942fc 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -2,6 +2,7 @@ import logging import time import random import socket +import sys import gevent import gevent.pool @@ -10,7 +11,7 @@ from gevent.server import StreamServer import util from util import helper from Config import config -from FileRequest import FileRequest +from .FileRequest import FileRequest from Peer import PeerPortchecker from Site import SiteManager from Connection import ConnectionServer @@ -41,13 +42,14 @@ class FileServer(ConnectionServer): port = config.tor_hs_port config.fileserver_port = port elif port == 0: # Use random port - port_range_from, port_range_to = map(int, config.fileserver_port_range.split("-")) + port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-"))) port = self.getRandomPort(ip, port_range_from, port_range_to) config.fileserver_port = port if not port: raise Exception("Can't find bindable port") if not config.tor == "always": config.saveValue("fileserver_port", port) # Save random port value for next restart + config.arguments.fileserver_port = port ConnectionServer.__init__(self, ip, port, self.handleRequest) self.log.debug("Supported IP types: %s" % self.supported_ip_types) @@ -59,17 +61,23 @@ class FileServer(ConnectionServer): self.stream_server_proxy = StreamServer( ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 ) - except Exception, err: + except Exception as err: self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err)) self.port_opened = {} - self.sites = {} + self.sites = self.site_manager.sites self.last_request = time.time() self.files_parsing = {} self.ui_server = None def getRandomPort(self, ip, port_range_from, port_range_to): + """Generates Random Port from given range + Args: + ip: IP Address + port_range_from: From Range + port_range_to: to Range + """ self.log.info("Getting random port in range %s-%s..." 
% (port_range_from, port_range_to)) tried = [] for bind_retry in range(100): @@ -108,7 +116,7 @@ class FileServer(ConnectionServer): self.log.debug("IPv6 supported on IP %s" % local_ipv6) return True except socket.error as err: - self.log.error("IPv6 not supported: %s" % err) + self.log.warning("IPv6 not supported: %s" % err) return False except Exception as err: self.log.error("IPv6 check error: %s" % err) @@ -117,7 +125,7 @@ class FileServer(ConnectionServer): def listenProxy(self): try: self.stream_server_proxy.serve_forever() - except Exception, err: + except Exception as err: if err.errno == 98: # Address already in use error self.log.debug("StreamServer proxy listen error: %s" % err) else: @@ -150,6 +158,12 @@ class FileServer(ConnectionServer): FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest def portCheck(self): + if config.offline: + self.log.info("Offline mode: port check disabled") + res = {"ipv4": None, "ipv6": None} + self.port_opened = res + return res + if config.ip_external: for ip_external in config.ip_external: SiteManager.peer_blacklist.append((ip_external, self.port)) # Add myself to peer blacklist @@ -216,7 +230,7 @@ class FileServer(ConnectionServer): # Check site file integrity def checkSite(self, site, check_files=False): - if site.settings["serving"]: + if site.isServing(): site.announce(mode="startup") # Announce site to tracker site.update(check_files=check_files) # Update site's content.json and download changed files site.sendMyHashfield() @@ -231,7 +245,7 @@ class FileServer(ConnectionServer): if not self.port_opened or force_port_check: # Test and open port if not tested yet if len(self.sites) <= 2: # Don't wait port opening on first startup sites_checking = True - for address, site in self.sites.items(): + for address, site in list(self.sites.items()): gevent.spawn(self.checkSite, site, check_files) self.portCheck() @@ -242,8 +256,8 @@ class FileServer(ConnectionServer): if not sites_checking: check_pool = gevent.pool.Pool(5) # Check sites integrity - for site in sorted(self.sites.values(), key=lambda site: site.settings.get("modified", 0), reverse=True): - if not site.settings["serving"]: + for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True): + if not site.isServing(): continue check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread time.sleep(2) @@ -263,8 +277,8 @@ class FileServer(ConnectionServer): (len(self.connections), self.has_internet, len(peers_protected)) ) - for address, site in self.sites.items(): - if not site.settings["serving"]: + for address, site in list(self.sites.items()): + if not site.isServing(): continue if not startup: @@ -273,16 +287,18 @@ class FileServer(ConnectionServer): time.sleep(1) # Prevent too quick request peers_protected = set([]) - for address, site in self.sites.items(): - if not site.settings["serving"]: + for address, site in list(self.sites.items()): + if not site.isServing(): continue if site.peers: with gevent.Timeout(10, exception=False): site.announcer.announcePex() - # Retry failed files - if site.bad_files: + # Last check modification failed + if site.content_updated is False: + site.update() + elif site.bad_files: site.retryBadFiles() if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7: @@ -302,7 +318,8 @@ class FileServer(ConnectionServer): def announceSite(self, site): site.announce(mode="update", pex=False) active_site = time.time() - site.settings.get("modified", 
0) < 24 * 60 * 60 - if site.settings["own"] or active_site: # Check connections more frequently on own and active sites to speed-up first connections + if site.settings["own"] or active_site: + # Check connections more frequently on own and active sites to speed-up first connections site.needConnections(check_site_on_reconnect=True) site.sendMyHashfield(3) site.updateHashfield(3) @@ -313,39 +330,61 @@ class FileServer(ConnectionServer): while 1: config.loadTrackersFile() s = time.time() - for address, site in self.sites.items(): - if not site.settings["serving"]: + for address, site in list(self.sites.items()): + if not site.isServing(): continue gevent.spawn(self.announceSite, site).join(timeout=10) time.sleep(1) taken = time.time() - s - sleep = max(0, 60 * 20 / len(config.trackers) - taken) # Query all trackers one-by-one in 20 minutes evenly distributed + # Query all trackers one-by-one in 20 minutes evenly distributed + sleep = max(0, 60 * 20 / len(config.trackers) - taken) + self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." % (taken, sleep)) time.sleep(sleep) # Detects if computer back from wakeup def wakeupWatcher(self): last_time = time.time() + last_my_ips = socket.gethostbyname_ex('')[2] while 1: time.sleep(30) - if time.time() - max(self.last_request, last_time) > 60 * 3: + is_time_changed = time.time() - max(self.last_request, last_time) > 60 * 3 + if is_time_changed: # If taken more than 3 minute then the computer was in sleep mode self.log.info( - "Wakeup detected: time warp from %s to %s (%s sleep seconds), acting like startup..." % + "Wakeup detected: time warp from %0.f to %0.f (%0.f sleep seconds), acting like startup..." % (last_time, time.time(), time.time() - last_time) ) + + my_ips = socket.gethostbyname_ex('')[2] + is_ip_changed = my_ips != last_my_ips + if is_ip_changed: + self.log.info("IP change detected from %s to %s" % (last_my_ips, my_ips)) + + if is_time_changed or is_ip_changed: self.checkSites(check_files=False, force_port_check=True) + last_time = time.time() + last_my_ips = my_ips # Bind and start serving sites def start(self, check_sites=True): + if self.stopping: + return False + ConnectionServer.start(self) + + try: + self.stream_server.start() + except Exception as err: + self.log.error("Error listening on: %s:%s: %s" % (self.ip, self.port, err)) + self.sites = self.site_manager.list() if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader - DebugReloader(self.reload) + DebugReloader.watcher.addCallback(self.reload) if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) diff --git a/src/File/__init__.py b/src/File/__init__.py index 20b28a97..1eb602d6 100644 --- a/src/File/__init__.py +++ b/src/File/__init__.py @@ -1,2 +1,2 @@ -from FileServer import FileServer -from FileRequest import FileRequest \ No newline at end of file +from .FileServer import FileServer +from .FileRequest import FileRequest \ No newline at end of file diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 536ecf41..03cc1f47 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -6,11 +6,11 @@ import collections import gevent -from cStringIO import StringIO +import io from Debug import Debug from Config import config from util import helper -from PeerHashfield import PeerHashfield +from .PeerHashfield import PeerHashfield from Plugin import PluginManager if config.use_tempfiles: @@ -21,8 +21,9 @@ if config.use_tempfiles: @PluginManager.acceptPlugins class Peer(object): __slots__ = ( - 
"ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield", "time_added", "has_hashfield", "is_tracker_connection", - "time_my_hashfield_sent", "last_ping", "reputation", "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" + "ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield", + "time_added", "has_hashfield", "is_tracker_connection", "time_my_hashfield_sent", "last_ping", "reputation", + "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" ) def __init__(self, ip, port, site=None, connection_server=None): @@ -91,11 +92,12 @@ class Peer(object): elif self.site: connection_server = self.site.connection_server else: - connection_server = sys.modules["main"].file_server + import main + connection_server = main.file_server self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection) self.reputation += 1 self.connection.sites += 1 - except Exception, err: + except Exception as err: self.onConnectionError("Getting connection error") self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) @@ -113,7 +115,10 @@ class Peer(object): return self.connection def __str__(self): - return "Peer:%-12s" % self.ip + if self.site: + return "Peer:%-12s of %s" % (self.ip, self.site.address_short) + else: + return "Peer:%-12s" % self.ip def __repr__(self): return "<%s>" % self.__str__() @@ -128,9 +133,12 @@ class Peer(object): def found(self, source="other"): if self.reputation < 5: if source == "tracker": - self.reputation += 1 + if self.ip.endswith(".onion"): + self.reputation += 1 + else: + self.reputation += 2 elif source == "local": - self.reputation += 3 + self.reputation += 20 if source in ("tracker", "local"): self.site.peers_recent.appendleft(self) @@ -146,7 +154,7 @@ class Peer(object): self.log("Send request: %s %s %s %s" % (params.get("site", ""), cmd, params.get("inner_path", ""), params.get("location", ""))) - for retry in range(1, 2): # Retry 1 times + for retry in range(1, 4): # Retry 3 times try: if not self.connection: raise Exception("No connection found") @@ -164,7 +172,7 @@ class Peer(object): return res else: raise Exception("Invalid response: %s" % res) - except Exception, err: + except Exception as err: if type(err).__name__ == "Notify": # Greenlet killed by worker self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) break @@ -195,7 +203,7 @@ class Peer(object): if config.use_tempfiles: buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') else: - buff = StringIO() + buff = io.BytesIO() s = time.time() while True: # Read in smaller parts @@ -240,7 +248,7 @@ class Peer(object): with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception res = self.request("ping") - if res and "body" in res and res["body"] == "Pong!": + if res and "body" in res and res["body"] == b"Pong!": response_time = time.time() - s break # All fine, exit from for loop # Timeout reached or bad response @@ -267,19 +275,16 @@ class Peer(object): request["peers_onion"] = packed_peers["onion"] if packed_peers["ipv6"]: request["peers_ipv6"] = packed_peers["ipv6"] - res = self.request("pex", request) - if not res or "error" in res: return False - added = 0 # Remove unsupported peer 
types - if "peers_ipv6" in res and "ipv6" not in self.connection.server.supported_ip_types: + if "peers_ipv6" in res and self.connection and "ipv6" not in self.connection.server.supported_ip_types: del res["peers_ipv6"] - if "peers_onion" in res and "onion" not in self.connection.server.supported_ip_types: + if "peers_onion" in res and self.connection and "onion" not in self.connection.server.supported_ip_types: del res["peers_onion"] # Add IPv4 + IPv6 @@ -313,7 +318,7 @@ class Peer(object): res = self.request("getHashfield", {"site": self.site.address}) if not res or "error" in res or "hashfield_raw" not in res: return False - self.hashfield.replaceFromString(res["hashfield_raw"]) + self.hashfield.replaceFromBytes(res["hashfield_raw"]) return self.hashfield @@ -331,16 +336,19 @@ class Peer(object): key = "peers" else: key = "peers_%s" % ip_type - for hash, peers in res.get(key, {}).items()[0:30]: + for hash, peers in list(res.get(key, {}).items())[0:30]: if ip_type == "onion": unpacker_func = helper.unpackOnionAddress else: unpacker_func = helper.unpackAddress - back[hash] += map(unpacker_func, peers) + back[hash] += list(map(unpacker_func, peers)) for hash in res.get("my", []): - back[hash].append((self.connection.ip, self.connection.port)) + if self.connection: + back[hash].append((self.connection.ip, self.connection.port)) + else: + back[hash].append((self.ip, self.port)) return back @@ -352,13 +360,26 @@ class Peer(object): if self.time_my_hashfield_sent and self.site.content_manager.hashfield.time_changed <= self.time_my_hashfield_sent: return False # Peer already has the latest hashfield - res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tostring()}) + res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tobytes()}) if not res or "error" in res: return False else: self.time_my_hashfield_sent = time.time() return True + def publish(self, address, inner_path, body, modified, diffs=[]): + if len(body) > 10 * 1024 and self.connection and self.connection.handshake.get("rev", 0) >= 4095: + # To save bw we don't push big content.json to peers + body = b"" + + return self.request("update", { + "site": address, + "inner_path": inner_path, + "body": body, + "modified": modified, + "diffs": diffs + }) + # Stop and remove from site def remove(self, reason="Removing"): self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed)) diff --git a/src/Peer/PeerHashfield.py b/src/Peer/PeerHashfield.py index 050d47f4..fdd414c8 100644 --- a/src/Peer/PeerHashfield.py +++ b/src/Peer/PeerHashfield.py @@ -3,7 +3,7 @@ import time class PeerHashfield(object): - __slots__ = ("storage", "time_changed", "append", "remove", "tostring", "fromstring", "__len__", "__iter__") + __slots__ = ("storage", "time_changed", "append", "remove", "tobytes", "frombytes", "__len__", "__iter__") def __init__(self): self.storage = self.createStorage() self.time_changed = time.time() @@ -12,8 +12,8 @@ class PeerHashfield(object): storage = array.array("H") self.append = storage.append self.remove = storage.remove - self.tostring = storage.tostring - self.fromstring = storage.fromstring + self.tobytes = storage.tobytes + self.frombytes = storage.frombytes self.__len__ = storage.__len__ self.__iter__ = storage.__iter__ return storage @@ -58,9 +58,9 @@ class PeerHashfield(object): def hasHash(self, hash): return int(hash[0:4], 16) in self.storage - def 
replaceFromString(self, hashfield_raw): + def replaceFromBytes(self, hashfield_raw): self.storage = self.createStorage() - self.storage.fromstring(hashfield_raw) + self.storage.frombytes(hashfield_raw) self.time_changed = time.time() if __name__ == "__main__": @@ -68,8 +68,8 @@ if __name__ == "__main__": s = time.time() for i in range(10000): field.appendHashId(i) - print time.time()-s + print(time.time()-s) s = time.time() for i in range(10000): field.hasHash("AABB") - print time.time()-s \ No newline at end of file + print(time.time()-s) \ No newline at end of file diff --git a/src/Peer/PeerPortchecker.py b/src/Peer/PeerPortchecker.py index 5bcf91df..3c4daecf 100644 --- a/src/Peer/PeerPortchecker.py +++ b/src/Peer/PeerPortchecker.py @@ -1,6 +1,6 @@ import logging -import urllib -import urllib2 +import urllib.request +import urllib.parse import re import time @@ -9,6 +9,10 @@ from util import UpnpPunch class PeerPortchecker(object): + checker_functions = { + "ipv4": ["checkIpfingerprints", "checkCanyouseeme"], + "ipv6": ["checkMyaddr", "checkIpv6scanner"] + } def __init__(self, file_server): self.log = logging.getLogger("PeerPortchecker") self.upnp_port_opened = False @@ -16,10 +20,12 @@ class PeerPortchecker(object): def requestUrl(self, url, post_data=None): if type(post_data) is dict: - post_data = urllib.urlencode(post_data) - req = urllib2.Request(url, post_data) - req.add_header('Referer', url) - return urllib2.urlopen(req, timeout=20.0) + post_data = urllib.parse.urlencode(post_data).encode("utf8") + req = urllib.request.Request(url, post_data) + req.add_header("Referer", url) + req.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11") + req.add_header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8") + return urllib.request.urlopen(req, timeout=20.0) def portOpen(self, port): self.log.info("Trying to open port using UpnpPunch...") @@ -37,10 +43,7 @@ class PeerPortchecker(object): return UpnpPunch.ask_to_close_port(port, protos=["TCP"]) def portCheck(self, port, ip_type="ipv4"): - if ip_type == "ipv6": - checker_functions = ["checkMyaddr", "checkIpv6scanner"] - else: - checker_functions = ["checkPortchecker", "checkCanyouseeme"] + checker_functions = self.checker_functions[ip_type] for func_name in checker_functions: func = getattr(self, func_name) @@ -49,13 +52,13 @@ class PeerPortchecker(object): res = func(port) if res: self.log.info( - "Checking port %s (%s) using %s result: %s in %.3fs" % + "Checked port %s (%s) using %s result: %s in %.3fs" % (port, ip_type, func_name, res, time.time() - s) ) time.sleep(0.1) if res["opened"] and not self.file_server.had_external_incoming: res["opened"] = False - self.log.warning("Port %s:%s, but no incoming connection" % (res["ip"], port)) + self.log.warning("Port %s:%s looks opened, but no incoming connection" % (res["ip"], port)) break except Exception as err: self.log.warning( @@ -67,11 +70,12 @@ class PeerPortchecker(object): return res def checkCanyouseeme(self, port): - data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read() - message = re.match('.*

<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1) -        message = re.sub("<.*?>", "", message.replace("<br>
    ", " ").replace(" ", " ")) # Strip http tags + data = urllib.request.urlopen("https://www.canyouseeme.org/", b"ip=1.1.1.1&port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") - match = re.match(".*service on (.*?) on", message) + message = re.match(r'.*

<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1) +        message = re.sub(r"<.*?>", "", message.replace("<br>
    ", " ").replace(" ", " ")) # Strip http tags + + match = re.match(r".*service on (.*?) on", message) if match: ip = match.group(1) else: @@ -84,12 +88,71 @@ class PeerPortchecker(object): else: raise Exception("Invalid response: %s" % message) - def checkPortchecker(self, port): - data = urllib2.urlopen("https://portchecker.co/check", "port=%s" % port, timeout=20.0).read() - message = re.match('.*
<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1) -        message = re.sub("<.*?>", "", message.replace("<br>
    ", " ").replace(" ", " ").strip()) # Strip http tags + def checkIpfingerprints(self, port): + data = self.requestUrl("https://www.ipfingerprints.com/portscan.php").read().decode("utf8") + ip = re.match(r'.*name="remoteHost".*?value="(.*?)"', data, re.DOTALL).group(1) - match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) + post_data = { + "remoteHost": ip, "start_port": port, "end_port": port, + "normalScan": "Yes", "scan_type": "connect2", "ping_type": "none" + } + message = self.requestUrl("https://www.ipfingerprints.com/scripts/getPortsInfo.php", post_data).read().decode("utf8") + + if "open" in message: + return {"ip": ip, "opened": True} + elif "filtered" in message or "closed" in message: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message) + + def checkMyaddr(self, port): + url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php" + + data = self.requestUrl(url).read().decode("utf8") + + ip = re.match(r'.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) + + post_data = {"addr": ip, "ports_selected": "", "ports_list": port} + data = self.requestUrl(url, post_data).read().decode("utf8") + + message = re.match(r".*(.*?)
    ", data, re.DOTALL).group(1) + + if "ok.png" in message: + return {"ip": ip, "opened": True} + elif "fail.png" in message: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message) + + def checkIpv6scanner(self, port): + url = "http://www.ipv6scanner.com/cgi-bin/main.py" + + data = self.requestUrl(url).read().decode("utf8") + + ip = re.match(r'.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) + + post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"} + data = self.requestUrl(url, post_data).read().decode("utf8") + + message = re.match(r".*(.*?)
    ", data, re.DOTALL).group(1) + message_text = re.sub("<.*?>", " ", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags + + if "OPEN" in message_text: + return {"ip": ip, "opened": True} + elif "CLOSED" in message_text or "FILTERED" in message_text: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message_text) + + def checkPortchecker(self, port): # Not working: Forbidden + data = self.requestUrl("https://portchecker.co").read().decode("utf8") + csrf = re.match(r'.*name="_csrf" value="(.*?)"', data, re.DOTALL).group(1) + + data = self.requestUrl("https://portchecker.co", {"port": port, "_csrf": csrf}).read().decode("utf8") + message = re.match(r'.*
<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1) +        message = re.sub(r"<.*?>", "", message.replace("<br>
    ", " ").replace(" ", " ").strip()) # Strip http tags + + match = re.match(r".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) if match: ip = match.group(1) else: @@ -102,20 +165,21 @@ class PeerPortchecker(object): else: raise Exception("Invalid response: %s" % message) - def checkSubnetonline(self, port): + def checkSubnetonline(self, port): # Not working: Invalid response url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php" - data = self.requestUrl(url).read() + data = self.requestUrl(url).read().decode("utf8") - ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) - token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) - print ip + ip = re.match(r'.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) + token = re.match(r'.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."} - data = self.requestUrl(url, post_data).read() + data = self.requestUrl(url, post_data).read().decode("utf8") - message = re.match(".*
<div class=\"formfield\">(.*?)</div>", data, re.DOTALL).group(1) -        message = re.sub("<.*?>", "", message.replace("<br>
    ", " ").replace(" ", " ").strip()) # Strip http tags + print(post_data, data) + + message = re.match(r".*
<div class=\"formfield\">(.*?)</div>", data, re.DOTALL).group(1) +        message = re.sub(r"<.*?>", "", message.replace("<br>
    ", " ").replace(" ", " ").strip()) # Strip http tags if "online" in message: return {"ip": ip, "opened": True} @@ -123,49 +187,3 @@ class PeerPortchecker(object): return {"ip": ip, "opened": False} else: raise Exception("Invalid response: %s" % message) - - def checkMyaddr(self, port): - url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php" - - data = self.requestUrl(url).read() - - ip = re.match('.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) - - post_data = {"addr": ip, "ports_selected": "", "ports_list": port} - data = self.requestUrl(url, post_data).read() - - message = re.match(".*(.*?)
    ", data, re.DOTALL).group(1) - - if "ok.png" in message: - return {"ip": ip, "opened": True} - elif "fail.png" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message) - - def checkIpv6scanner(self, port): - url = "http://www.ipv6scanner.com/cgi-bin/main.py" - - data = self.requestUrl(url).read() - - ip = re.match('.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) - - post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"} - data = self.requestUrl(url, post_data).read() - - message = re.match(".*(.*?)
    ", data, re.DOTALL).group(1) - message_text = re.sub("<.*?>", " ", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags - - if "OPEN" in message_text: - return {"ip": ip, "opened": True} - elif "CLOSED" in message_text or "FILTERED" in message_text: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message_text) - -if __name__ == "__main__": - import time - peer_portchecker = PeerPortchecker() - for func_name in ["checkIpv6scanner", "checkMyaddr", "checkPortchecker", "checkCanyouseeme"]: - s = time.time() - print(func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s)) diff --git a/src/Peer/__init__.py b/src/Peer/__init__.py index 3e92827f..e73c58c5 100644 --- a/src/Peer/__init__.py +++ b/src/Peer/__init__.py @@ -1,2 +1,2 @@ -from Peer import Peer -from PeerHashfield import PeerHashfield +from .Peer import Peer +from .PeerHashfield import PeerHashfield diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py index c5d1f79b..56540e60 100644 --- a/src/Plugin/PluginManager.py +++ b/src/Plugin/PluginManager.py @@ -5,84 +5,184 @@ import shutil import time from collections import defaultdict +import importlib +import json + from Debug import Debug from Config import config +import plugins class PluginManager: def __init__(self): self.log = logging.getLogger("PluginManager") - self.plugin_path = "plugins" # Plugin directory + self.path_plugins = None + if plugins.__file__: + self.path_plugins = os.path.dirname(os.path.abspath(plugins.__file__)); + self.path_installed_plugins = config.data_dir + "/__plugins__" self.plugins = defaultdict(list) # Registered plugins (key: class name, value: list of plugins for class) self.subclass_order = {} # Record the load order of the plugins, to keep it after reload self.pluggable = {} self.plugin_names = [] # Loaded plugin names - self.after_load = [] # Execute functions after loaded plugins + self.plugins_updated = {} # List of updated plugins since restart + self.plugins_rev = {} # Installed plugins revision numbers + self.after_load = [] # Execute functions after loaded plugins + self.function_flags = {} # Flag function for permissions + self.reloading = False + self.config_path = config.data_dir + "/plugins.json" + self.loadConfig() - sys.path.append(os.path.join(os.getcwd(), self.plugin_path)) + self.config.setdefault("builtin", {}) + + if self.path_plugins: + sys.path.append(os.path.join(os.getcwd(), self.path_plugins)) self.migratePlugins() if config.debug: # Auto reload Plugins on file change from Debug import DebugReloader - DebugReloader(self.reloadPlugins) + DebugReloader.watcher.addCallback(self.reloadPlugins) + + def loadConfig(self): + if os.path.isfile(self.config_path): + try: + self.config = json.load(open(self.config_path, encoding="utf8")) + except Exception as err: + self.log.error("Error loading %s: %s" % (self.config_path, err)) + self.config = {} + else: + self.config = {} + + def saveConfig(self): + f = open(self.config_path, "w", encoding="utf8") + json.dump(self.config, f, ensure_ascii=False, sort_keys=True, indent=2) def migratePlugins(self): - for dir_name in os.listdir(self.plugin_path): + for dir_name in os.listdir(self.path_plugins): if dir_name == "Mute": self.log.info("Deleting deprecated/renamed plugin: %s" % dir_name) - shutil.rmtree("%s/%s" % (self.plugin_path, dir_name)) + shutil.rmtree("%s/%s" % (self.path_plugins, dir_name)) # -- Load / Unload -- - # Load all plugin - def loadPlugins(self): - s = time.time() - for dir_name in sorted(os.listdir(self.plugin_path)): - dir_path = os.path.join(self.plugin_path, 
dir_name) + def listPlugins(self, list_disabled=False): + plugins = [] + for dir_name in sorted(os.listdir(self.path_plugins)): + dir_path = os.path.join(self.path_plugins, dir_name) + plugin_name = dir_name.replace("disabled-", "") if dir_name.startswith("disabled"): - continue # Dont load if disabled - if not os.path.isdir(dir_path): - continue # Dont load if not dir + is_enabled = False + else: + is_enabled = True + + plugin_config = self.config["builtin"].get(plugin_name, {}) + if "enabled" in plugin_config: + is_enabled = plugin_config["enabled"] + + if dir_name == "__pycache__" or not os.path.isdir(dir_path): + continue # skip if dir_name.startswith("Debug") and not config.debug: continue # Only load in debug mode if module name starts with Debug - self.log.debug("Loading plugin: %s" % dir_name) + if not is_enabled and not list_disabled: + continue # Dont load if disabled + + plugin = {} + plugin["source"] = "builtin" + plugin["name"] = plugin_name + plugin["dir_name"] = dir_name + plugin["dir_path"] = dir_path + plugin["inner_path"] = plugin_name + plugin["enabled"] = is_enabled + plugin["rev"] = config.rev + plugin["loaded"] = plugin_name in self.plugin_names + plugins.append(plugin) + + plugins += self.listInstalledPlugins(list_disabled) + return plugins + + def listInstalledPlugins(self, list_disabled=False): + plugins = [] + + for address, site_plugins in sorted(self.config.items()): + if address == "builtin": + continue + for plugin_inner_path, plugin_config in sorted(site_plugins.items()): + is_enabled = plugin_config.get("enabled", False) + if not is_enabled and not list_disabled: + continue + plugin_name = os.path.basename(plugin_inner_path) + + dir_path = "%s/%s/%s" % (self.path_installed_plugins, address, plugin_inner_path) + + plugin = {} + plugin["source"] = address + plugin["name"] = plugin_name + plugin["dir_name"] = plugin_name + plugin["dir_path"] = dir_path + plugin["inner_path"] = plugin_inner_path + plugin["enabled"] = is_enabled + plugin["rev"] = plugin_config.get("rev", 0) + plugin["loaded"] = plugin_name in self.plugin_names + plugins.append(plugin) + + return plugins + + # Load all plugin + def loadPlugins(self): + all_loaded = True + s = time.time() + if self.path_plugins is None: + return + for plugin in self.listPlugins(): + self.log.debug("Loading plugin: %s (%s)" % (plugin["name"], plugin["source"])) + if plugin["source"] != "builtin": + self.plugins_rev[plugin["name"]] = plugin["rev"] + site_plugin_dir = os.path.dirname(plugin["dir_path"]) + if site_plugin_dir not in sys.path: + sys.path.append(site_plugin_dir) try: - __import__(dir_name) - except Exception, err: - self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err))) - if dir_name not in self.plugin_names: - self.plugin_names.append(dir_name) + sys.modules[plugin["name"]] = __import__(plugin["dir_name"]) + except Exception as err: + self.log.error("Plugin %s load error: %s" % (plugin["name"], Debug.formatException(err))) + all_loaded = False + if plugin["name"] not in self.plugin_names: + self.plugin_names.append(plugin["name"]) self.log.debug("Plugins loaded in %.3fs" % (time.time() - s)) for func in self.after_load: func() + return all_loaded # Reload all plugins def reloadPlugins(self): + self.reloading = True self.after_load = [] self.plugins_before = self.plugins self.plugins = defaultdict(list) # Reset registered plugins - for module_name, module in sys.modules.items(): - if module and "__file__" in dir(module) and self.plugin_path in module.__file__: # Module file 
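The listPlugins() hunk above decides whether a plugin is enabled from two sources: the on-disk "disabled-" directory prefix gives the default, and an "enabled" key under the builtin section of data/plugins.json overrides it. A minimal sketch of that resolution (this is an illustration, not the actual method; the plugin name is only an example):

```python
# Sketch of the enable/disable resolution used by PluginManager.listPlugins():
# the directory prefix gives the default, and plugins.json overrides it.
def is_plugin_enabled(dir_name, builtin_config):
    plugin_name = dir_name.replace("disabled-", "")
    is_enabled = not dir_name.startswith("disabled")  # default from directory name
    plugin_config = builtin_config.get(plugin_name, {})
    if "enabled" in plugin_config:  # plugins.json wins over the prefix
        is_enabled = plugin_config["enabled"]
    return plugin_name, is_enabled

# Example: the directory says disabled, but plugins.json re-enables it
print(is_plugin_enabled("disabled-Bootstrapper", {"Bootstrapper": {"enabled": True}}))
# -> ('Bootstrapper', True)
```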
within plugin_path - if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled - # Re-add non-reloadable plugins - for class_name, classes in self.plugins_before.iteritems(): - for c in classes: - if c.__module__ != module.__name__: - continue - self.plugins[class_name].append(c) - else: - try: - reload(module) - except Exception, err: - self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) + for module_name, module in list(sys.modules.items()): + if not module or not getattr(module, "__file__", None): + continue + if self.path_plugins not in module.__file__ and self.path_installed_plugins not in module.__file__: + continue + + if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled + # Re-add non-reloadable plugins + for class_name, classes in self.plugins_before.items(): + for c in classes: + if c.__module__ != module.__name__: + continue + self.plugins[class_name].append(c) + else: + try: + importlib.reload(module) + except Exception as err: + self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) self.loadPlugins() # Load new plugins # Change current classes in memory import gc patched = {} - for class_name, classes in self.plugins.iteritems(): + for class_name, classes in self.plugins.items(): classes = classes[:] # Copy the current plugins classes.reverse() base_class = self.pluggable[class_name] # Original class @@ -96,8 +196,8 @@ class PluginManager: # Change classes in modules patched = {} - for class_name, classes in self.plugins.iteritems(): - for module_name, module in sys.modules.iteritems(): + for class_name, classes in self.plugins.items(): + for module_name, module in list(sys.modules.items()): if class_name in dir(module): if "__class__" not in dir(getattr(module, class_name)): # Not a class continue @@ -110,6 +210,7 @@ class PluginManager: patched[class_name] = patched.get(class_name, 0) + 1 self.log.debug("Patched modules: %s" % patched) + self.reloading = False plugin_manager = PluginManager() # Singletone @@ -134,7 +235,7 @@ def acceptPlugins(base_class): if str(key) in plugin_manager.subclass_order[class_name] else 9999 ) - plugin_manager.subclass_order[class_name] = map(str, classes) + plugin_manager.subclass_order[class_name] = list(map(str, classes)) classes.reverse() classes.append(base_class) # Add the class itself to end of inherience line @@ -147,6 +248,13 @@ def acceptPlugins(base_class): # Register plugin to class name decorator def registerTo(class_name): + if config.debug and not plugin_manager.reloading: + import gc + for obj in gc.get_objects(): + if type(obj).__name__ == class_name: + raise Exception("Class %s instances already present in memory" % class_name) + break + plugin_manager.log.debug("New plugin registered to: %s" % class_name) if class_name not in plugin_manager.plugins: plugin_manager.plugins[class_name] = [] @@ -181,4 +289,4 @@ if __name__ == "__main__": else: return "Can't route to", path - print Request().route("MainPage") + print(Request().route("MainPage")) diff --git a/src/Site/Site.py b/src/Site/Site.py index 29b00cab..d6179307 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -7,6 +7,7 @@ import random import sys import hashlib import collections +import base64 import gevent import gevent.pool @@ -17,14 +18,15 @@ from Peer import Peer from Worker import WorkerManager from Debug import Debug from Content import ContentManager -from SiteStorage import SiteStorage +from .SiteStorage import SiteStorage from Crypt 
import CryptHash from util import helper from util import Diff +from util import GreenletManager from Plugin import PluginManager from File import FileServer -from SiteAnnouncer import SiteAnnouncer -import SiteManager +from .SiteAnnouncer import SiteAnnouncer +from . import SiteManager @PluginManager.acceptPlugins @@ -32,15 +34,17 @@ class Site(object): def __init__(self, address, allow_create=True, settings=None): self.address = str(re.sub("[^A-Za-z0-9]", "", address)) # Make sure its correct address - self.address_hash = hashlib.sha256(self.address).digest() + self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest() + self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest() self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging self.log = logging.getLogger("Site:%s" % self.address_short) self.addEventListeners() self.content = None # Load content.json self.peers = {} # Key: ip:port, Value: Peer.Peer - self.peers_recent = collections.deque(maxlen=100) + self.peers_recent = collections.deque(maxlen=150) self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself) + self.greenlet_manager = GreenletManager.GreenletManager() # Running greenlets self.worker_manager = WorkerManager(self) # Handle site download from other peers self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept) self.content_updated = None # Content.js update time @@ -53,21 +57,18 @@ class Site(object): self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files self.content_manager = ContentManager(self) self.content_manager.loadContents() # Load content.json files - if "main" in sys.modules and "file_server" in dir(sys.modules["main"]): # Use global file server by default if possible - self.connection_server = sys.modules["main"].file_server - else: - if "main" in sys.modules: - sys.modules["main"].file_server = FileServer() - self.connection_server = sys.modules["main"].file_server + if "main" in sys.modules: # import main has side-effects, breaks tests + import main + if "file_server" in dir(main): # Use global file server by default if possible + self.connection_server = main.file_server else: - self.connection_server = FileServer() + main.file_server = FileServer() + self.connection_server = main.file_server + else: + self.connection_server = FileServer() self.announcer = SiteAnnouncer(self) # Announce and get peer list from other nodes - if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed) - self.settings["auth_key"] = CryptHash.random() - self.log.debug("New auth key: %s" % self.settings["auth_key"]) - if not self.settings.get("wrapper_key"): # To auth websocket permissions self.settings["wrapper_key"] = CryptHash.random() self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"]) @@ -90,7 +91,7 @@ class Site(object): self.settings = settings if "cache" not in settings: settings["cache"] = {} - if "size_optional" not in settings: + if "size_files_optional" not in settings: settings["size_optional"] = 0 if "optional_downloaded" not in settings: settings["optional_downloaded"] = 0 @@ -110,7 +111,7 @@ class Site(object): self.settings["autodownloadoptional"] = True # Add admin permissions to homepage - if self.address == config.homepage and "ADMIN" not in self.settings["permissions"]: + if self.address in (config.homepage, config.updatesite) and "ADMIN" not in self.settings["permissions"]: 
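In the Site.__init__() hunk above, address_hash is now computed from self.address.encode("ascii") because Python 3's hashlib only accepts bytes. A minimal standalone illustration of that change (the address value below is just an example):

```python
import hashlib

address = "1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D"  # example address, for illustration only

# Python 2 allowed hashlib.sha256(address) on a str; Python 3 requires bytes,
# hence the .encode("ascii") added in Site.__init__ above.
address_hash = hashlib.sha256(address.encode("ascii")).digest()
address_sha1 = hashlib.sha1(address.encode("ascii")).digest()
address_short = "%s..%s" % (address[:6], address[-4:])

print(address_short, address_hash.hex()[:16], address_sha1.hex()[:16])
```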
self.settings["permissions"].append("ADMIN") return @@ -122,12 +123,18 @@ class Site(object): if not SiteManager.site_manager.sites.get(self.address): SiteManager.site_manager.sites[self.address] = self SiteManager.site_manager.load(False) - SiteManager.site_manager.save() + SiteManager.site_manager.saveDelayed() + + def isServing(self): + if config.offline: + return False + else: + return self.settings["serving"] def getSettingsCache(self): back = {} back["bad_files"] = self.bad_files - back["hashfield"] = self.content_manager.hashfield.tostring().encode("base64") + back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii") return back # Max site size in MB @@ -136,18 +143,24 @@ class Site(object): # Next size limit based on current size def getNextSizeLimit(self): - size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] + size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] size = self.settings.get("size", 0) for size_limit in size_limits: if size * 1.2 < size_limit * 1024 * 1024: return size_limit return 999999 + def isAddedRecently(self): + return time.time() - self.settings.get("added", 0) < 60 * 60 * 24 + # Download all file from content.json def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False, diffs={}): s = time.time() if config.verbose: - self.log.debug("Downloading %s..." % inner_path) + self.log.debug( + "DownloadContent %s: Started. (download_files: %s, check_modifications: %s, diffs: %s)..." % + (inner_path, download_files, check_modifications, diffs.keys()) + ) if not inner_path.endswith("content.json"): return False @@ -155,25 +168,39 @@ class Site(object): found = self.needFile(inner_path, update=self.bad_files.get(inner_path)) content_inner_dir = helper.getDirname(inner_path) if not found: - self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications)) + self.log.debug("DownloadContent %s: Download failed, check_modifications: %s" % (inner_path, check_modifications)) if check_modifications: # Download failed, but check modifications if its succed later self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications") return False # Could not download content.json if config.verbose: - self.log.debug("Got %s" % inner_path) + self.log.debug("DownloadContent got %s" % inner_path) + sub_s = time.time() + changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False) + if config.verbose: + self.log.debug("DownloadContent %s: loadContent done in %.3fs" % (inner_path, time.time() - sub_s)) + if inner_path == "content.json": self.saveSettings() if peer: # Update last received update from peer to prevent re-sending the same update to it peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"] + # Verify size limit + if inner_path == "content.json": + site_size_limit = self.getSizeLimit() * 1024 * 1024 + content_size = len(json.dumps(self.content_manager.contents[inner_path], indent=1)) + sum([file["size"] for file in list(self.content_manager.contents[inner_path].get("files", {}).values()) if file["size"] >= 0]) # Size of new content + if site_size_limit < content_size: + # Not enought don't download anything + self.log.debug("DownloadContent Size limit reached (site too big please increase limit): %.2f MB > %.2f MB" % (content_size / 1024 / 1024, site_size_limit / 1024 / 1024)) + return False + # Start download files file_threads 
= [] if download_files: - for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys(): + for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()): file_inner_path = content_inner_dir + file_relative_path # Try to diff first @@ -201,11 +228,11 @@ class Site(object): time_on_done = time.time() - s self.log.debug( - "Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % + "DownloadContent Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % (file_inner_path, time_diff, time_verify, time_write, time_on_done) ) - except Exception, err: - self.log.debug("Failed to patch %s: %s" % (file_inner_path, err)) + except Exception as err: + self.log.debug("DownloadContent Failed to patch %s: %s" % (file_inner_path, err)) diff_success = False if not diff_success: @@ -218,7 +245,7 @@ class Site(object): if inner_path == "content.json": gevent.spawn(self.updateHashfield) - for file_relative_path in self.content_manager.contents[inner_path].get("files_optional", {}).keys(): + for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()): file_inner_path = content_inner_dir + file_relative_path if file_inner_path not in changed and not self.bad_files.get(file_inner_path): continue @@ -233,28 +260,27 @@ class Site(object): # Wait for includes download include_threads = [] - for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys(): + for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()): file_inner_path = content_inner_dir + file_relative_path include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer) include_threads.append(include_thread) if config.verbose: - self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads))) + self.log.debug("DownloadContent %s: Downloading %s includes..." % (inner_path, len(include_threads))) gevent.joinall(include_threads) if config.verbose: - self.log.debug("%s: Includes download ended" % inner_path) + self.log.debug("DownloadContent %s: Includes download ended" % inner_path) if check_modifications: # Check if every file is up-to-date self.checkModifications(0) if config.verbose: - self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed))) + self.log.debug("DownloadContent %s: Downloading %s files, changed: %s..." 
% (inner_path, len(file_threads), len(changed))) gevent.joinall(file_threads) if config.verbose: - self.log.debug("%s: DownloadContent ended in %.3fs" % (inner_path, time.time() - s)) - - if not self.worker_manager.tasks: - self.onComplete() # No more task trigger site complete + self.log.debug("DownloadContent %s: ended in %.3fs (tasks left: %s)" % ( + inner_path, time.time() - s, len(self.worker_manager.tasks) + )) return True @@ -262,7 +288,7 @@ class Site(object): def getReachableBadFiles(self): if not self.bad_files: return False - return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3] + return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3] # Retry download bad files def retryBadFiles(self, force=False): @@ -272,7 +298,7 @@ class Site(object): content_inner_paths = [] file_inner_paths = [] - for bad_file, tries in self.bad_files.items(): + for bad_file, tries in list(self.bad_files.items()): if force or random.randint(0, min(40, tries)) < 4: # Larger number tries = less likely to check every 15min if bad_file.endswith("content.json"): content_inner_paths.append(bad_file) @@ -286,7 +312,7 @@ class Site(object): self.pooledDownloadFile(file_inner_paths, only_if_bad=True) def checkBadFiles(self): - for bad_file in self.bad_files.keys(): + for bad_file in list(self.bad_files.keys()): file_info = self.content_manager.getFileInfo(bad_file) if bad_file.endswith("content.json"): if file_info is False and bad_file != "content.json": @@ -299,16 +325,22 @@ class Site(object): # Download all files of the site @util.Noparallel(blocking=False) - def download(self, check_size=False, blind_includes=False): + def download(self, check_size=False, blind_includes=False, retry_bad_files=True): if not self.connection_server: self.log.debug("No connection server found, skipping download") return False + s = time.time() self.log.debug( - "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s" % - (self.bad_files, check_size, blind_includes) + "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s, isAddedRecently: %s" % + (self.bad_files, check_size, blind_includes, self.isAddedRecently()) ) - gevent.spawn(self.announce, force=True) + + if self.isAddedRecently(): + gevent.spawn(self.announce, mode="start", force=True) + else: + gevent.spawn(self.announce, mode="update") + if check_size: # Check the size first valid = self.downloadContent("content.json", download_files=False) # Just download content.json files if not valid: @@ -317,7 +349,9 @@ class Site(object): # Download everything valid = self.downloadContent("content.json", check_modifications=blind_includes) - self.onComplete.once(lambda: self.retryBadFiles(force=True)) + if retry_bad_files: + self.onComplete.once(lambda: self.retryBadFiles(force=True)) + self.log.debug("Download done in %.3fs" % (time.time() - s)) return valid @@ -340,6 +374,7 @@ class Site(object): del self.bad_files[aborted_inner_path] self.worker_manager.removeSolvedFileTasks(mark_as_good=False) break + pool.join() self.log.debug("Ended downloadContent pool len: %s, skipped: %s" % (len(inner_paths), num_skipped)) def pooledDownloadFile(self, inner_paths, pool_size=100, only_if_bad=False): @@ -357,12 +392,13 @@ class Site(object): # Update worker, try to find client that supports listModifications command def updater(self, peers_try, queried, since): + threads = [] while 1: if not peers_try or len(queried) >= 3: # Stop after 3 successful query break peer = peers_try.pop(0) if config.verbose: - 
self.log.debug("Try to get updates from: %s Left: %s" % (peer, peers_try)) + self.log.debug("CheckModifications: Try to get updates from: %s Left: %s" % (peer, peers_try)) res = None with gevent.Timeout(20, exception=False): @@ -374,7 +410,8 @@ class Site(object): queried.append(peer) modified_contents = [] my_modified = self.content_manager.listModified(since) - for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we + num_old_files = 0 + for inner_path, modified in res["modified_files"].items(): # Check if the peer has newer files than we has_newer = int(modified) > my_modified.get(inner_path, 0) has_older = int(modified) < my_modified.get(inner_path, 0) if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified): @@ -382,13 +419,18 @@ class Site(object): # We dont have this file or we have older modified_contents.append(inner_path) self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 - if has_older: - self.log.debug("%s client has older version of %s, publishing there..." % (peer, inner_path)) + if has_older and num_old_files < 5: + num_old_files += 1 + self.log.debug("CheckModifications: %s client has older version of %s, publishing there (%s/5)..." % (peer, inner_path, num_old_files)) gevent.spawn(self.publisher, inner_path, [peer], [], 1) if modified_contents: - self.log.debug("%s new modified file from %s" % (len(modified_contents), peer)) + self.log.debug("CheckModifications: %s new modified file from %s" % (len(modified_contents), peer)) modified_contents.sort(key=lambda inner_path: 0 - res["modified_files"][inner_path]) # Download newest first - gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True) + t = gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True) + threads.append(t) + if config.verbose: + self.log.debug("CheckModifications: Waiting for %s pooledDownloadContent" % len(threads)) + gevent.joinall(threads) # Check modified content.json files from peers and add modified files to bad_files # Return: Successfully queried peers [Peer, Peer...] 
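The updater() changes above cap how many older files are published back to a single peer (5 per response) and join the spawned pooledDownloadContent greenlets before returning. A rough sketch of the compare step, independent of the actual Site class, with made-up timestamps:

```python
# Rough sketch (not the actual Site.updater code): classify a peer's
# "modified_files" response against our own listModified() timestamps.
def classify_modified_files(remote_modified, local_modified, max_publish_back=5):
    to_download = []  # peer has a newer version than us
    to_publish = []   # we are newer; push back to the peer, capped at 5 files
    for inner_path, modified in remote_modified.items():
        local = local_modified.get(inner_path, 0)
        if int(modified) > local:
            to_download.append(inner_path)
        elif int(modified) < local and len(to_publish) < max_publish_back:
            to_publish.append(inner_path)
    return to_download, to_publish

# Example with made-up data
remote = {"data/users/content.json": 1650000000, "content.json": 1600000000}
local = {"data/users/content.json": 1640000000, "content.json": 1610000000}
print(classify_modified_files(remote, local))
# -> (['data/users/content.json'], ['content.json'])
```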
@@ -400,10 +442,10 @@ class Site(object): # Wait for peers if not self.peers: - self.announce() + self.announce(mode="update") for wait in range(10): time.sleep(5 + wait) - self.log.debug("Waiting for peers...") + self.log.debug("CheckModifications: Waiting for peers...") if self.peers: break @@ -417,7 +459,7 @@ class Site(object): if config.verbose: self.log.debug( - "Try to get listModifications from peers: %s, connected: %s, since: %s" % + "CheckModifications: Try to get listModifications from peers: %s, connected: %s, since: %s" % (peers_try, peers_connected_num, since) ) @@ -434,7 +476,7 @@ class Site(object): if queried: break - self.log.debug("Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since)) + self.log.debug("CheckModifications: Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since)) time.sleep(0.1) return queried @@ -444,13 +486,20 @@ class Site(object): def update(self, announce=False, check_files=False, since=None): self.content_manager.loadContent("content.json", load_includes=False) # Reload content.json self.content_updated = None # Reset content updated time + + if check_files: + self.storage.updateBadFiles(quick_check=True) # Quick check and mark bad files based on file size + + if not self.isServing(): + return False + self.updateWebsocket(updating=True) # Remove files that no longer in content.json self.checkBadFiles() if announce: - self.announce(force=True) + self.announce(mode="update", force=True) # Full update, we can reset bad files if check_files and since == 0: @@ -458,9 +507,6 @@ class Site(object): queried = self.checkModifications(since) - if check_files: - self.storage.updateBadFiles(quick_check=True) # Quick check and mark bad files based on file size - changed, deleted = self.content_manager.loadContent("content.json", load_includes=False) if self.bad_files: @@ -470,7 +516,6 @@ class Site(object): if len(queried) == 0: # Failed to query modifications self.content_updated = False - self.bad_files["content.json"] = 1 else: self.content_updated = time.time() @@ -480,7 +525,7 @@ class Site(object): def redownloadContents(self): # Download all content.json again content_threads = [] - for inner_path in self.content_manager.contents.keys(): + for inner_path in list(self.content_manager.contents.keys()): content_threads.append(self.needFile(inner_path, update=True, blocking=False)) self.log.debug("Waiting %s content.json to finish..." % len(content_threads)) @@ -515,15 +560,10 @@ class Site(object): for retry in range(2): try: with gevent.Timeout(timeout, False): - result = peer.request("update", { - "site": self.address, - "inner_path": inner_path, - "body": body, - "diffs": diffs - }) + result = peer.publish(self.address, inner_path, body, content_json_modified, diffs) if result: break - except Exception, err: + except Exception as err: self.log.error("Publish error: %s" % Debug.formatException(err)) result = {"exception": Debug.formatException(err)} @@ -545,7 +585,7 @@ class Site(object): publishers = [] # Publisher threads if not self.peers: - self.announce() + self.announce(mode="more") if limit == "default": limit = 5 @@ -563,7 +603,7 @@ class Site(object): peers = set(peers) self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." 
% ( - inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024 + inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024 )) if not peers: @@ -595,6 +635,7 @@ class Site(object): return len(published) # Copy this site + @util.Noparallel() def clone(self, address, privatekey=None, address_index=None, root_inner_path="", overwrite=False): import shutil new_site = SiteManager.site_manager.need(address, all_file=False) @@ -631,8 +672,8 @@ class Site(object): ) # Copy files - for content_inner_path, content in self.content_manager.contents.items(): - file_relative_paths = content.get("files", {}).keys() + for content_inner_path, content in list(self.content_manager.contents.items()): + file_relative_paths = list(content.get("files", {}).keys()) # Sign content.json at the end to make sure every file is included file_relative_paths.sort() @@ -667,9 +708,9 @@ class Site(object): shutil.copy(file_path, file_path_dest) # If -default in path, create a -default less copy of the file - if "-default" in file_inner_path: - file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", "")) - if new_site.storage.isFile(file_inner_path.replace("-default", "")) and not overwrite: + if "-default" in file_inner_path_dest: + file_path_dest = new_site.storage.getPath(file_inner_path_dest.replace("-default", "")) + if new_site.storage.isFile(file_inner_path_dest.replace("-default", "")) and not overwrite: # Don't overwrite site files with default ones self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path) continue @@ -680,15 +721,15 @@ class Site(object): shutil.copy(file_path, file_path_dest) # Sign if content json if file_path_dest.endswith("/content.json"): - new_site.storage.onUpdated(file_inner_path.replace("-default", "")) + new_site.storage.onUpdated(file_inner_path_dest.replace("-default", "")) new_site.content_manager.loadContent( - file_inner_path.replace("-default", ""), add_bad_files=False, + file_inner_path_dest.replace("-default", ""), add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey, remove_missing_optional=True) + new_site.content_manager.sign(file_inner_path_dest.replace("-default", ""), privatekey, remove_missing_optional=True) new_site.content_manager.loadContent( - file_inner_path, add_bad_files=False, delete_removed_files=False, load_includes=False + file_inner_path_dest, add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: @@ -700,7 +741,10 @@ class Site(object): # Rebuild DB if new_site.storage.isFile("dbschema.json"): new_site.storage.closeDb() - new_site.storage.rebuildDb() + try: + new_site.storage.rebuildDb() + except Exception as err: + self.log.error(err) return new_site @@ -709,6 +753,10 @@ class Site(object): return self.needFile(*args, **kwargs) def isFileDownloadAllowed(self, inner_path, file_info): + # Verify space for all site + if self.settings["size"] > self.getSizeLimit() * 1024 * 1024: + return False + # Verify space for file if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024: self.log.debug( "File size %s too large: %sMB > %sMB, skipping..." 
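isFileDownloadAllowed() above now also rejects downloads once the whole site exceeds its size limit, in addition to the existing per-file config.file_size_limit check. The rule in isolation (a simplified sketch; limits are in MB and the sample values are made up, and the real method also logs why a file was skipped):

```python
# Simplified version of the two size gates in isFileDownloadAllowed() above.
def is_download_allowed(site_size, site_limit_mb, file_size, file_limit_mb):
    if site_size > site_limit_mb * 1024 * 1024:
        return False  # whole site already over its allowed size
    if file_size > file_limit_mb * 1024 * 1024:
        return False  # single file larger than the per-file limit
    return True

print(is_download_allowed(9 * 1024 * 1024, 10, 2 * 1024 * 1024, 10))   # True
print(is_download_allowed(30 * 1024 * 1024, 25, 2 * 1024 * 1024, 10))  # False
```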
% @@ -731,15 +779,21 @@ class Site(object): # Check and download if file not exist def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0): - if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything + if self.worker_manager.tasks.findTask(inner_path): + task = self.worker_manager.addTask(inner_path, peer, priority=priority) + if blocking: + return task["evt"].get() + else: + return task["evt"] + elif self.storage.isFile(inner_path) and not update: # File exist, no need to do anything return True - elif self.settings["serving"] is False: # Site not serving + elif not self.isServing(): # Site not serving return False else: # Wait until file downloaded - self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file if not self.content_manager.contents.get("content.json"): # No content.json, download it first! - self.log.debug("Need content.json first") - gevent.spawn(self.announce) + self.log.debug("Need content.json first (inner_path: %s, priority: %s)" % (inner_path, priority)) + if priority > 0: + gevent.spawn(self.announce) if inner_path != "content.json": # Prevent double download task = self.worker_manager.addTask("content.json", peer) task["evt"].get() @@ -765,6 +819,8 @@ class Site(object): self.log.debug("%s: Download not allowed" % inner_path) return False + self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file + task = self.worker_manager.addTask(inner_path, peer, priority=priority, file_info=file_info) if blocking: return task["evt"].get() @@ -794,7 +850,8 @@ class Site(object): return peer def announce(self, *args, **kwargs): - self.announcer.announce(*args, **kwargs) + if self.isServing(): + self.announcer.announce(*args, **kwargs) # Keep connections to get the updates def needConnections(self, num=None, check_site_on_reconnect=False): @@ -812,7 +869,7 @@ class Site(object): self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers))) if connected < need: # Need more than we have - for peer in self.peers.values(): + for peer in self.getRecentPeers(30): if not peer.connection or not peer.connection.connected: # No peer connection or disconnected peer.pex() # Initiate peer exchange if peer.connection and peer.connection.connected: @@ -831,13 +888,15 @@ class Site(object): # Return: Probably peers verified to be connectable recently def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True): - peers = self.peers.values() + peers = list(self.peers.values()) found = [] for peer in peers: if peer.key.endswith(":0"): continue # Not connectable if not peer.connection: continue # No connection + if peer.ip.endswith(".onion") and not self.connection_server.tor_manager.enabled: + continue # Onion not supported if peer.key in ignore: continue # The requester has this peer if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago @@ -862,7 +921,10 @@ class Site(object): # Return: Recently found peers def getRecentPeers(self, need_num): found = list(set(self.peers_recent)) - self.log.debug("Recent peers %s of %s (need: %s)" % (len(found), len(self.peers_recent), need_num)) + self.log.debug( + "Recent peers %s of %s (need: %s)" % + (len(found), len(self.peers), need_num) + ) if len(found) >= need_num or len(found) >= len(self.peers): return sorted( @@ -873,8 +935,13 @@ class Site(object): # Add random peers need_more = need_num - len(found) + if not 
self.connection_server.tor_manager.enabled: + peers = [peer for peer in self.peers.values() if not peer.ip.endswith(".onion")] + else: + peers = list(self.peers.values()) + found_more = sorted( - self.peers.values()[0:need_more * 50], + peers[0:need_more * 50], key=lambda peer: peer.reputation, reverse=True )[0:need_more * 2] @@ -906,7 +973,7 @@ class Site(object): # Cleanup probably dead peers and close connection if too much def cleanupPeers(self, peers_protected=[]): - peers = self.peers.values() + peers = list(self.peers.values()) if len(peers) > 20: # Cleanup old peers removed = 0 @@ -994,14 +1061,22 @@ class Site(object): return self.settings.get("autodownloadoptional") def delete(self): + self.log.info("Deleting site...") + s = time.time() self.settings["serving"] = False + self.settings["deleting"] = True self.saveSettings() + num_greenlets = self.greenlet_manager.stopGreenlets("Site %s deleted" % self.address) self.worker_manager.running = False - self.worker_manager.stopWorkers() - self.storage.deleteFiles() - self.updateWebsocket() - self.content_manager.contents.db.deleteSite(self) + num_workers = self.worker_manager.stopWorkers() SiteManager.site_manager.delete(self.address) + self.content_manager.contents.db.deleteSite(self) + self.updateWebsocket(deleted=True) + self.storage.deleteFiles() + self.log.info( + "Deleted site in %.3fs (greenlets: %s, workers: %s)" % + (time.time() - s, num_greenlets, num_workers) + ) # - Events - @@ -1019,7 +1094,7 @@ class Site(object): # Send site status update to websocket clients def updateWebsocket(self, **kwargs): if kwargs: - param = {"event": kwargs.items()[0]} + param = {"event": list(kwargs.items())[0]} else: param = None for ws in self.websockets: diff --git a/src/Site/SiteAnnouncer.py b/src/Site/SiteAnnouncer.py index 48a05e69..2fd63e82 100644 --- a/src/Site/SiteAnnouncer.py +++ b/src/Site/SiteAnnouncer.py @@ -1,23 +1,16 @@ import random import time import hashlib -import urllib -import urllib2 -import struct -import socket import re import collections -from lib import bencode -from lib.subtl.subtl import UdpTrackerClient -from lib.PySocks import socks -from lib.PySocks import sockshandler import gevent from Plugin import PluginManager from Config import config from Debug import Debug from util import helper +from greenlet import GreenletExit import util @@ -42,12 +35,12 @@ class SiteAnnouncer(object): def getSupportedTrackers(self): trackers = self.getTrackers() - if config.disable_udp or config.trackers_proxy != "disable": - trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")] if not self.site.connection_server.tor_manager.enabled: trackers = [tracker for tracker in trackers if ".onion" not in tracker] + trackers = [tracker for tracker in trackers if self.getAddressParts(tracker)] # Remove trackers with unknown address + if "ipv6" not in self.site.connection_server.supported_ip_types: trackers = [tracker for tracker in trackers if helper.getIpType(self.getAddressParts(tracker)["ip"]) != "ipv6"] @@ -68,8 +61,8 @@ class SiteAnnouncer(object): def getOpenedServiceTypes(self): back = [] # Type of addresses they can reach me - if config.trackers_proxy == "disable": - for ip_type, opened in self.site.connection_server.port_opened.items(): + if config.trackers_proxy == "disable" and config.tor != "always": + for ip_type, opened in list(self.site.connection_server.port_opened.items()): if opened: back.append(ip_type) if self.site.connection_server.tor_manager.start_onions: @@ -100,11 +93,12 @@ class 
SiteAnnouncer(object): for tracker in trackers: # Start announce threads tracker_stats = global_stats[tracker] # Reduce the announce time for trackers that looks unreliable - if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time.time() - 60 * min(30, tracker_stats["num_error"]): + time_announce_allowed = time.time() - 60 * min(30, tracker_stats["num_error"]) + if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time_announce_allowed and not force: if config.verbose: self.site.log.debug("Tracker %s looks unreliable, announce skipped (error: %s)" % (tracker, tracker_stats["num_error"])) continue - thread = gevent.spawn(self.announceTracker, tracker, mode=mode) + thread = self.site.greenlet_manager.spawn(self.announceTracker, tracker, mode=mode) threads.append(thread) thread.tracker = tracker @@ -144,7 +138,7 @@ class SiteAnnouncer(object): self.site.log.error("Announce to %s trackers in %.3fs, failed" % (len(threads), time.time() - s)) if len(threads) == 1 and mode != "start": # Move to next tracker self.site.log.debug("Tracker failed, skipping to next one...") - gevent.spawn_later(1.0, self.announce, force=force, mode=mode, pex=pex) + self.site.greenlet_manager.spawnLater(1.0, self.announce, force=force, mode=mode, pex=pex) self.updateWebsocket(trackers="announced") @@ -158,27 +152,20 @@ class SiteAnnouncer(object): self.updateWebsocket(pex="announced") def getTrackerHandler(self, protocol): - if protocol == "udp": - handler = self.announceTrackerUdp - elif protocol == "http": - handler = self.announceTrackerHttp - elif protocol == "https": - handler = self.announceTrackerHttps - else: - handler = None - return handler + return None def getAddressParts(self, tracker): if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker): return None protocol, address = tracker.split("://", 1) - try: + if ":" in address: ip, port = address.rsplit(":", 1) - except ValueError as err: + else: ip = address - port = 80 if protocol.startswith("https"): port = 443 + else: + port = 80 back = {} back["protocol"] = protocol back["address"] = address @@ -190,7 +177,7 @@ class SiteAnnouncer(object): s = time.time() address_parts = self.getAddressParts(tracker) if not address_parts: - self.site.log.warning("Tracker %s error: Invalid address" % tracker.decode("utf8", "ignore")) + self.site.log.warning("Tracker %s error: Invalid address" % tracker) return False if tracker not in self.stats: @@ -214,19 +201,21 @@ class SiteAnnouncer(object): peers = handler(address_parts["address"], mode=mode, num_want=num_want) else: raise AnnounceError("Unknown protocol: %s" % address_parts["protocol"]) - except Exception, err: - self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, str(err).decode("utf8", "ignore"), mode)) + except Exception as err: + self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, Debug.formatException(err), mode)) error = err if error: self.stats[tracker]["status"] = "error" self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["last_error"] = str(err).decode("utf8", "ignore") + self.stats[tracker]["last_error"] = str(error) self.stats[tracker]["time_last_error"] = time.time() - self.stats[tracker]["num_error"] += 1 + if self.site.connection_server.has_internet: + self.stats[tracker]["num_error"] += 1 self.stats[tracker]["num_request"] += 1 global_stats[tracker]["num_request"] += 1 - global_stats[tracker]["num_error"] += 1 + if self.site.connection_server.has_internet: + 
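The announce loop above skips trackers that keep failing: a tracker with more than 5 recorded errors is skipped until one minute per error (capped at 30 minutes) has passed since its last request, unless force is set. The back-off rule on its own (a sketch, not the actual method):

```python
import time

# Back-off rule from the announce loop above: unreliable trackers wait one
# minute per recorded error, capped at 30 minutes, unless force is given.
def should_skip_tracker(stats, force=False):
    time_announce_allowed = time.time() - 60 * min(30, stats["num_error"])
    return (
        not force
        and stats["num_error"] > 5
        and stats["time_request"] > time_announce_allowed
    )

stats = {"num_error": 8, "time_request": time.time() - 120}  # made-up sample
print(should_skip_tracker(stats))        # True: only 2 minutes since the last try
print(should_skip_tracker(stats, True))  # False: force overrides the back-off
```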
global_stats[tracker]["num_error"] += 1 self.updateWebsocket(tracker="error") return False @@ -266,111 +255,6 @@ class SiteAnnouncer(object): ) return time.time() - s - def announceTrackerUdp(self, tracker_address, mode="start", num_want=10): - s = time.time() - if config.disable_udp: - raise AnnounceError("Udp disabled by config") - if config.trackers_proxy != "disable": - raise AnnounceError("Udp trackers not available with proxies") - - ip, port = tracker_address.split("/")[0].split(":") - tracker = UdpTrackerClient(ip, int(port)) - if helper.getIpType(ip) in self.getOpenedServiceTypes(): - tracker.peer_port = self.fileserver_port - else: - tracker.peer_port = 0 - tracker.connect() - if not tracker.poll_once(): - raise AnnounceError("Could not connect") - tracker.announce(info_hash=hashlib.sha1(self.site.address).hexdigest(), num_want=num_want, left=431102370) - back = tracker.poll_once() - if not back: - raise AnnounceError("No response after %.0fs" % (time.time() - s)) - elif type(back) is dict and "response" in back: - peers = back["response"]["peers"] - else: - raise AnnounceError("Invalid response: %r" % back) - - return peers - - def httpRequest(self, url): - headers = { - 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11', - 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', - 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', - 'Accept-Encoding': 'none', - 'Accept-Language': 'en-US,en;q=0.8', - 'Connection': 'keep-alive' - } - - req = urllib2.Request(url, headers=headers) - - if config.trackers_proxy == "tor": - tor_manager = self.site.connection_server.tor_manager - handler = sockshandler.SocksiPyHandler(socks.SOCKS5, tor_manager.proxy_ip, tor_manager.proxy_port) - opener = urllib2.build_opener(handler) - return opener.open(req, timeout=50) - elif config.trackers_proxy == "disable": - return urllib2.urlopen(req, timeout=25) - else: - proxy_ip, proxy_port = config.trackers_proxy.split(":") - handler = sockshandler.SocksiPyHandler(socks.SOCKS5, proxy_ip, int(proxy_port)) - opener = urllib2.build_opener(handler) - return opener.open(req, timeout=50) - - def announceTrackerHttps(self, *args, **kwargs): - kwargs["protocol"] = "https" - return self.announceTrackerHttp(*args, **kwargs) - - def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protocol="http"): - tracker_ip, tracker_port = tracker_address.rsplit(":", 1) - if helper.getIpType(tracker_ip) in self.getOpenedServiceTypes(): - port = self.fileserver_port - else: - port = 1 - params = { - 'info_hash': hashlib.sha1(self.site.address).digest(), - 'peer_id': self.peer_id, 'port': port, - 'uploaded': 0, 'downloaded': 0, 'left': 431102370, 'compact': 1, 'numwant': num_want, - 'event': 'started' - } - - url = protocol + "://" + tracker_address + "?" 
+ urllib.urlencode(params) - - s = time.time() - response = None - # Load url - if config.tor == "always" or config.trackers_proxy != "disable": - timeout = 60 - else: - timeout = 30 - - with gevent.Timeout(timeout, False): # Make sure of timeout - req = self.httpRequest(url) - response = req.read() - req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions - req.close() - req = None - - if not response: - raise AnnounceError("No response after %.0fs" % (time.time() - s)) - - # Decode peers - try: - peer_data = bencode.decode(response)["peers"] - response = None - peer_count = len(peer_data) / 6 - peers = [] - for peer_offset in xrange(peer_count): - off = 6 * peer_offset - peer = peer_data[off:off + 6] - addr, port = struct.unpack('!LH', peer) - peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port}) - except Exception as err: - raise AnnounceError("Invalid response: %r (%s)" % (response, err)) - - return peers - @util.Noparallel(blocking=False) def announcePex(self, query_num=2, need_num=5): peers = self.site.getConnectedPeers() @@ -379,7 +263,7 @@ class SiteAnnouncer(object): peers = self.site.getConnectedPeers() if len(peers) == 0: # Small number of connected peers for this site, connect to any - peers = self.site.peers.values() + peers = list(self.site.getRecentPeers(20)) need_num = 10 random.shuffle(peers) @@ -393,13 +277,15 @@ class SiteAnnouncer(object): if num_added: self.site.worker_manager.onPeers() self.site.updateWebsocket(peers_added=num_added) + else: + time.sleep(0.1) if done == query_num: break self.site.log.debug("Pex result: from %s peers got %s new peers." % (done, total_added)) def updateWebsocket(self, **kwargs): if kwargs: - param = {"event": kwargs.items()[0]} + param = {"event": list(kwargs.items())[0]} else: param = None diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index 04461cd7..684d69fc 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -7,10 +7,13 @@ import atexit import gevent +import util from Plugin import PluginManager from Content import ContentDb from Config import config from util import helper +from util import RateLimit +from util import Cached @PluginManager.acceptPlugins @@ -25,14 +28,25 @@ class SiteManager(object): atexit.register(lambda: self.save(recalculate_size=True)) # Load all sites from data/sites.json + @util.Noparallel() def load(self, cleanup=True, startup=False): - self.log.debug("Loading sites...") + from Debug import Debug + self.log.info("Loading sites... 
(cleanup: %s, startup: %s)" % (cleanup, startup)) self.loaded = False - from Site import Site + from .Site import Site address_found = [] added = 0 + load_s = time.time() # Load new adresses - for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems(): + try: + json_path = "%s/sites.json" % config.data_dir + data = json.load(open(json_path)) + except Exception as err: + raise Exception("Unable to load %s: %s" % (json_path, err)) + + sites_need = [] + + for address, settings in data.items(): if address not in self.sites: if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): # Root content.json exists, try load site @@ -40,7 +54,7 @@ class SiteManager(object): try: site = Site(address, settings=settings) site.content_manager.contents.get("content.json") - except Exception, err: + except Exception as err: self.log.debug("Error loading site %s: %s" % (address, err)) continue self.sites[address] = site @@ -49,14 +63,14 @@ class SiteManager(object): elif startup: # No site directory, start download self.log.debug("Found new site in sites.json: %s" % address) - gevent.spawn(self.need, address, settings=settings) + sites_need.append([address, settings]) added += 1 address_found.append(address) # Remove deleted adresses if cleanup: - for address in self.sites.keys(): + for address in list(self.sites.keys()): if address not in address_found: del(self.sites[address]) self.log.debug("Removed site: %s" % address) @@ -65,7 +79,7 @@ class SiteManager(object): content_db = ContentDb.getContentDb() for row in content_db.execute("SELECT * FROM site").fetchall(): address = row["address"] - if address not in self.sites: + if address not in self.sites and address not in address_found: self.log.info("Deleting orphan site from content.db: %s" % address) try: @@ -78,9 +92,14 @@ class SiteManager(object): if address in content_db.sites: del content_db.sites[address] - if added: - self.log.debug("SiteManager added %s sites" % added) self.loaded = True + for address, settings in sites_need: + gevent.spawn(self.need, address, settings=settings) + if added: + self.log.info("Added %s sites in %.3fs" % (added, time.time() - load_s)) + + def saveDelayed(self): + RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save) def save(self, recalculate_size=False): if not self.sites: @@ -93,7 +112,7 @@ class SiteManager(object): data = {} # Generate data file s = time.time() - for address, site in self.list().iteritems(): + for address, site in list(self.list().items()): if recalculate_size: site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size data[address] = site.settings @@ -102,13 +121,13 @@ class SiteManager(object): s = time.time() if data: - helper.atomicWrite("%s/sites.json" % config.data_dir, json.dumps(data, indent=2, sort_keys=True)) + helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8")) else: self.log.debug("Save error: No data") time_write = time.time() - s # Remove cache from site settings - for address, site in self.list().iteritems(): + for address, site in self.list().items(): site.settings["cache"] = {} self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write)) @@ -125,41 +144,67 @@ class SiteManager(object): def isDomain(self, address): return False + @Cached(timeout=10) + def isDomainCached(self, address): + return self.isDomain(address) + + def resolveDomain(self, domain): + return False + + 
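
# saveDelayed() above funnels frequent save requests through
# util.RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save),
# so bursts of site changes collapse into one deferred sites.json write.
# Below is a minimal standalone sketch of that debouncing idea (illustration
# only, not part of the patch and not ZeroNet's actual RateLimit code); it uses
# gevent like the rest of the code base, and call_async / _last_queued are
# illustrative names:

import time
import gevent

_last_queued = {}  # call name -> time the last call for that name was queued


def call_async(name, allowed_again, func):
    now = time.time()
    if now - _last_queued.get(name, 0) < allowed_again:
        return  # A call under this name was queued/ran recently: drop this one
    _last_queued[name] = now
    gevent.spawn_later(allowed_again, func)  # Run once the quiet period passes
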
@Cached(timeout=10) + def resolveDomainCached(self, domain): + return self.resolveDomain(domain) + # Return: Site object or None if not found def get(self, address): + if self.isDomainCached(address): + address_resolved = self.resolveDomainCached(address) + if address_resolved: + address = address_resolved + if not self.loaded: # Not loaded yet self.log.debug("Loading site: %s)..." % address) self.load() - return self.sites.get(address) + site = self.sites.get(address) + + return site + + def add(self, address, all_file=True, settings=None, **kwargs): + from .Site import Site + self.sites_changed = int(time.time()) + # Try to find site with differect case + for recover_address, recover_site in list(self.sites.items()): + if recover_address.lower() == address.lower(): + return recover_site + + if not self.isAddress(address): + return False # Not address: %s % address + self.log.debug("Added new site: %s" % address) + config.loadTrackersFile() + site = Site(address, settings=settings) + self.sites[address] = site + if not site.settings["serving"]: # Maybe it was deleted before + site.settings["serving"] = True + site.saveSettings() + if all_file: # Also download user files on first sync + site.download(check_size=True, blind_includes=True) + return site # Return or create site and start download site files - def need(self, address, all_file=True, settings=None): - from Site import Site + def need(self, address, *args, **kwargs): + if self.isDomainCached(address): + address_resolved = self.resolveDomainCached(address) + if address_resolved: + address = address_resolved + site = self.get(address) if not site: # Site not exist yet - self.sites_changed = int(time.time()) - # Try to find site with differect case - for recover_address, recover_site in self.sites.items(): - if recover_address.lower() == address.lower(): - return recover_site - - if not self.isAddress(address): - return False # Not address: %s % address - self.log.debug("Added new site: %s" % address) - config.loadTrackersFile() - site = Site(address, settings=settings) - self.sites[address] = site - if not site.settings["serving"]: # Maybe it was deleted before - site.settings["serving"] = True - site.saveSettings() - if all_file: # Also download user files on first sync - site.download(check_size=True, blind_includes=True) - + site = self.add(address, *args, **kwargs) return site def delete(self, address): self.sites_changed = int(time.time()) - self.log.debug("SiteManager deleted site: %s" % address) + self.log.debug("Deleted site: %s" % address) del(self.sites[address]) # Delete from sites.json self.save() diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py index 6c9d48c1..27032e79 100644 --- a/src/Site/SiteStorage.py +++ b/src/Site/SiteStorage.py @@ -3,7 +3,7 @@ import re import shutil import json import time -import sys +import errno from collections import defaultdict import sqlite3 @@ -11,19 +11,25 @@ import gevent.event import util from util import SafeRe -from Db import Db +from Db.Db import Db from Debug import Debug from Config import config from util import helper +from util import ThreadPool from Plugin import PluginManager from Translate import translate as _ +thread_pool_fs_read = ThreadPool.ThreadPool(config.threads_fs_read, name="FS read") +thread_pool_fs_write = ThreadPool.ThreadPool(config.threads_fs_write, name="FS write") +thread_pool_fs_batch = ThreadPool.ThreadPool(1, name="FS batch") + + @PluginManager.acceptPlugins class SiteStorage(object): def __init__(self, site, allow_create=True): self.site 
= site - self.directory = u"%s/%s" % (config.data_dir, self.site.address) # Site data diretory + self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir self.log = site.log self.db = None # Db class @@ -38,11 +44,14 @@ class SiteStorage(object): raise Exception("Directory not exists: %s" % self.directory) def getDbFile(self): - if self.isFile("dbschema.json"): - schema = self.loadJson("dbschema.json") - return schema["db_file"] + if self.db: + return self.db.schema["db_file"] else: - return False + if self.isFile("dbschema.json"): + schema = self.loadJson("dbschema.json") + return schema["db_file"] + else: + return False # Create new databaseobject with the site's schema def openDb(self, close_idle=False): @@ -50,57 +59,76 @@ class SiteStorage(object): db_path = self.getPath(schema["db_file"]) return Db(schema, db_path, close_idle=close_idle) - def closeDb(self): + def closeDb(self, reason="Unknown (SiteStorage)"): if self.db: - self.db.close() + self.db.close(reason) self.event_db_busy = None self.db = None def getDbSchema(self): try: + self.site.needFile("dbschema.json") schema = self.loadJson("dbschema.json") - except Exception, err: + except Exception as err: raise Exception("dbschema.json is not a valid JSON: %s" % err) return schema - # Return db class - def getDb(self): - if not self.db: - self.log.debug("No database, waiting for dbschema.json...") - self.site.needFile("dbschema.json", priority=3) - self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist - if self.has_db: - schema = self.getDbSchema() - db_path = self.getPath(schema["db_file"]) - if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: - self.rebuildDb() - - if self.db: - self.db.close() - self.db = self.openDb(close_idle=True) + def loadDb(self): + self.log.debug("No database, waiting for dbschema.json...") + self.site.needFile("dbschema.json", priority=3) + self.log.debug("Got dbschema.json") + self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist + if self.has_db: + schema = self.getDbSchema() + db_path = self.getPath(schema["db_file"]) + if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: + try: + self.rebuildDb(reason="Missing database") + except Exception as err: + self.log.error(err) + pass + if self.db: + self.db.close("Gettig new db for SiteStorage") + self.db = self.openDb(close_idle=True) + try: changed_tables = self.db.checkTables() if changed_tables: - self.rebuildDb(delete_db=False) # TODO: only update the changed table datas + self.rebuildDb(delete_db=False, reason="Changed tables") # TODO: only update the changed table datas + except sqlite3.OperationalError: + pass + # Return db class + @util.Noparallel() + def getDb(self): + if self.event_db_busy: # Db not ready for queries + self.log.debug("Wating for db...") + self.event_db_busy.get() # Wait for event + if not self.db: + self.loadDb() return self.db def updateDbFile(self, inner_path, file=None, cur=None): path = self.getPath(inner_path) - return self.getDb().updateJson(path, file, cur) + if cur: + db = cur.db + else: + db = self.getDb() + return db.updateJson(path, file, cur) # Return possible db files for the site + @thread_pool_fs_read.wrap def getDbFiles(self): found = 0 - for content_inner_path, content in self.site.content_manager.contents.iteritems(): + for content_inner_path, content in self.site.content_manager.contents.items(): # content.json file itself if 
self.isFile(content_inner_path): yield content_inner_path, self.getPath(content_inner_path) else: - self.log.error("[MISSING] %s" % content_inner_path) + self.log.debug("[MISSING] %s" % content_inner_path) # Data files in content.json content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"): continue # We only interesed in json files file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir @@ -108,110 +136,140 @@ class SiteStorage(object): if self.isFile(file_inner_path): yield file_inner_path, self.getPath(file_inner_path) else: - self.log.error("[MISSING] %s" % file_inner_path) + self.log.debug("[MISSING] %s" % file_inner_path) found += 1 if found % 100 == 0: - time.sleep(0.000001) # Context switch to avoid UI block + time.sleep(0.001) # Context switch to avoid UI block # Rebuild sql cache @util.Noparallel() - def rebuildDb(self, delete_db=True): + @thread_pool_fs_batch.wrap + def rebuildDb(self, delete_db=True, reason="Unknown"): + self.log.info("Rebuilding db (reason: %s)..." % reason) self.has_db = self.isFile("dbschema.json") if not self.has_db: return False - self.event_db_busy = gevent.event.AsyncResult() + schema = self.loadJson("dbschema.json") db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path) and delete_db: if self.db: - self.db.close() # Close db if open + self.closeDb("rebuilding") # Close db if open time.sleep(0.5) self.log.info("Deleting %s" % db_path) try: os.unlink(db_path) - except Exception, err: + except Exception as err: self.log.error("Delete error: %s" % err) - db = self.openDb() - self.log.info("Creating tables...") - db.checkTables() - cur = db.getCursor() - cur.execute("BEGIN") + if not self.db: + self.db = self.openDb() + self.event_db_busy = gevent.event.AsyncResult() + + self.log.info("Rebuild: Creating tables...") + + # raise DbTableError if not valid + self.db.checkTables() + + cur = self.db.getCursor() cur.logging = False - found = 0 s = time.time() - self.log.info("Getting db files...") + self.log.info("Rebuild: Getting db files...") db_files = list(self.getDbFiles()) - self.log.info("Importing data...") + num_imported = 0 + num_total = len(db_files) + num_error = 0 + + self.log.info("Rebuild: Importing data...") try: - if len(db_files) > 100: - self.site.messageWebsocket(_["Database rebuilding...
    Imported {0} of {1} files..."].format("0000", len(db_files)), "rebuild", 0) + if num_total > 100: + self.site.messageWebsocket( + _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( + "0000", num_total, num_error + ), "rebuild", 0 + ) for file_inner_path, file_path in db_files: try: if self.updateDbFile(file_inner_path, file=open(file_path, "rb"), cur=cur): - found += 1 - except Exception, err: + num_imported += 1 + except Exception as err: self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err))) - if found and found % 100 == 0: + num_error += 1 + + if num_imported and num_imported % 100 == 0: self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files..."].format(found, len(db_files)), - "rebuild", - int(float(found) / len(db_files) * 100) + _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( + num_imported, num_total, num_error + ), + "rebuild", int(float(num_imported) / num_total * 100) ) - time.sleep(0.000001) # Context switch to avoid UI block + time.sleep(0.001) # Context switch to avoid UI block finally: - cur.execute("END") cur.close() - db.close() - self.log.info("Closing Db: %s" % db) - if len(db_files) > 100: - self.site.messageWebsocket(_["Database rebuilding...
    Imported {0} of {1} files..."].format(found, len(db_files)), "rebuild", 100) - self.log.info("Imported %s data file in %ss" % (found, time.time() - s)) + if num_total > 100: + self.site.messageWebsocket( + _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( + num_imported, num_total, num_error + ), "rebuild", 100 + ) + self.log.info("Rebuild: Imported %s data file in %.3fs" % (num_imported, time.time() - s)) self.event_db_busy.set(True) # Event done, notify waiters self.event_db_busy = None # Clear event + self.db.commit("Rebuilt") + + return True # Execute sql query or rebuild on dberror def query(self, query, params=None): if not query.strip().upper().startswith("SELECT"): raise Exception("Only SELECT query supported") - if self.event_db_busy: # Db not ready for queries - self.log.debug("Wating for db...") - self.event_db_busy.get() # Wait for event try: res = self.getDb().execute(query, params) - except sqlite3.DatabaseError, err: + except sqlite3.DatabaseError as err: if err.__class__.__name__ == "DatabaseError": self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query)) - self.rebuildDb() + try: + self.rebuildDb(reason="Query error") + except sqlite3.OperationalError: + pass res = self.db.cur.execute(query, params) else: raise err return res + def ensureDir(self, inner_path): + try: + os.makedirs(self.getPath(inner_path)) + except OSError as err: + if err.errno == errno.EEXIST: + return False + else: + raise err + return True + # Open file object - def open(self, inner_path, mode="rb", create_dirs=False): + def open(self, inner_path, mode="rb", create_dirs=False, **kwargs): file_path = self.getPath(inner_path) if create_dirs: - file_dir = os.path.dirname(file_path) - if not os.path.isdir(file_dir): - os.makedirs(file_dir) - return open(file_path, mode) + file_inner_dir = os.path.dirname(inner_path) + self.ensureDir(file_inner_dir) + return open(file_path, mode, **kwargs) # Open file object - def read(self, inner_path, mode="r"): - return open(self.getPath(inner_path), mode).read() + @thread_pool_fs_read.wrap + def read(self, inner_path, mode="rb"): + return self.open(inner_path, mode).read() - # Write content to file - def write(self, inner_path, content): + @thread_pool_fs_write.wrap + def writeThread(self, inner_path, content): file_path = self.getPath(inner_path) # Create dir if not exist - file_dir = os.path.dirname(file_path) - if not os.path.isdir(file_dir): - os.makedirs(file_dir) + self.ensureDir(os.path.dirname(inner_path)) # Write file if hasattr(content, 'read'): # File-like object + with open(file_path, "wb") as file: shutil.copyfileobj(content, file) # Write buff to disk else: # Simple string @@ -220,7 +278,10 @@ class SiteStorage(object): else: with open(file_path, "wb") as file: file.write(content) - del content + + # Write content to file + def write(self, inner_path, content): + self.writeThread(inner_path, content) self.onUpdated(inner_path) # Remove file from filesystem @@ -235,18 +296,20 @@ class SiteStorage(object): def rename(self, inner_path_before, inner_path_after): for retry in range(3): + rename_err = None # To workaround "The process cannot access the file beacause it is being used by another process." 
error try: os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after)) - err = None break - except Exception, err: + except Exception as err: + rename_err = err self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry)) time.sleep(0.1 + retry) - if err: - raise err + if rename_err: + raise rename_err # List files from a directory + @thread_pool_fs_read.wrap def walk(self, dir_inner_path, ignore=None): directory = self.getPath(dir_inner_path) for root, dirs, files in os.walk(directory): @@ -279,6 +342,7 @@ class SiteStorage(object): dirs[:] = dirs_filtered # list directories in a directory + @thread_pool_fs_read.wrap def list(self, dir_inner_path): directory = self.getPath(dir_inner_path) return os.listdir(directory) @@ -286,62 +350,43 @@ class SiteStorage(object): # Site content updated def onUpdated(self, inner_path, file=None): # Update Sql cache + should_load_to_db = inner_path.endswith(".json") or inner_path.endswith(".json.gz") if inner_path == "dbschema.json": self.has_db = self.isFile("dbschema.json") # Reopen DB to check changes if self.has_db: - self.closeDb() - self.getDb() - elif not config.disable_db and (inner_path.endswith(".json") or inner_path.endswith(".json.gz")) and self.has_db: # Load json file to db + self.closeDb("New dbschema") + gevent.spawn(self.getDb) + elif not config.disable_db and should_load_to_db and self.has_db: # Load json file to db if config.verbose: self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file)) try: self.updateDbFile(inner_path, file) - except Exception, err: + except Exception as err: self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err))) - self.closeDb() + self.closeDb("Json load error") # Load and parse json file + @thread_pool_fs_read.wrap def loadJson(self, inner_path): - with self.open(inner_path) as file: - return json.load(file) - - def formatJson(self, data): - content = json.dumps(data, indent=1, sort_keys=True) - - # Make it a little more compact by removing unnecessary white space - def compact_dict(match): - if "\n" in match.group(0): - return match.group(0).replace(match.group(1), match.group(1).strip()) - else: - return match.group(0) - - content = re.sub("\{(\n[^,\[\{]{10,100}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL) - - def compact_list(match): - if "\n" in match.group(0): - stripped_lines = re.sub("\n[ ]*", "", match.group(1)) - return match.group(0).replace(match.group(1), stripped_lines) - else: - return match.group(0) - - content = re.sub("\[([^\[\{]{2,300}?)\][, ]{0,2}\n", compact_list, content, flags=re.DOTALL) - - # Remove end of line whitespace - content = re.sub("(?m)[ ]+$", "", content) - return content + try: + with self.open(inner_path, "r", encoding="utf8") as file: + return json.load(file) + except Exception as err: + self.log.warning("Json load error: %s" % Debug.formatException(err)) + return None # Write formatted json file def writeJson(self, inner_path, data): # Write to disk - self.write(inner_path, self.formatJson(data)) + self.write(inner_path, helper.jsonDumps(data).encode("utf8")) # Get file size def getSize(self, inner_path): path = self.getPath(inner_path) try: return os.path.getsize(path) - except: + except Exception: return 0 # File exist @@ -363,9 +408,9 @@ class SiteStorage(object): return self.directory if "../" in inner_path: - raise Exception(u"File not allowed: %s" % inner_path) + raise Exception("File not allowed: %s" % inner_path) - return u"%s/%s" % (self.directory, inner_path) + return 
"%s/%s" % (self.directory, inner_path) # Get site dir relative path def getInnerPath(self, path): @@ -375,7 +420,7 @@ class SiteStorage(object): if path.startswith(self.directory): inner_path = path[len(self.directory) + 1:] else: - raise Exception(u"File not allowed: %s" % path) + raise Exception("File not allowed: %s" % path) return inner_path # Verify all files sha512sum using content.json @@ -390,17 +435,17 @@ class SiteStorage(object): self.log.debug("VerifyFile content.json not exists") self.site.needFile("content.json", update=True) # Force update to fix corrupt file self.site.content_manager.loadContent() # Reload content.json - for content_inner_path, content in self.site.content_manager.contents.items(): + for content_inner_path, content in list(self.site.content_manager.contents.items()): back["num_content"] += 1 i += 1 if i % 50 == 0: - time.sleep(0.0001) # Context switch to avoid gevent hangs + time.sleep(0.001) # Context switch to avoid gevent hangs if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file back["num_content_missing"] += 1 self.log.debug("[MISSING] %s" % content_inner_path) bad_files.append(content_inner_path) - for file_relative_path in content.get("files", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()): back["num_file"] += 1 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / @@ -418,7 +463,8 @@ class SiteStorage(object): else: try: ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception, err: + except Exception as _err: + err = _err ok = False if not ok: @@ -430,7 +476,7 @@ class SiteStorage(object): # Optional files optional_added = 0 optional_removed = 0 - for file_relative_path in content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files_optional", {}).keys()): back["num_optional"] += 1 file_node = content["files_optional"][file_relative_path] file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir @@ -440,9 +486,10 @@ class SiteStorage(object): if not os.path.isfile(file_path): if self.site.content_manager.isDownloaded(file_inner_path, hash_id): back["num_optional_removed"] += 1 - self.log.debug("[OPTIONAL REMOVED] %s" % file_inner_path) + self.log.debug("[OPTIONAL MISSING] %s" % file_inner_path) self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"]) - if add_optional: + if add_optional and self.site.isDownloadable(file_inner_path): + self.log.debug("[OPTIONAL ADDING] %s" % file_inner_path) bad_files.append(file_inner_path) continue @@ -451,7 +498,7 @@ class SiteStorage(object): else: try: ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception, err: + except Exception as err: ok = False if ok: @@ -475,7 +522,7 @@ class SiteStorage(object): ) self.site.content_manager.contents.db.processDelayed() - time.sleep(0.0001) # Context switch to avoid gevent hangs + time.sleep(0.001) # Context switch to avoid gevent hangs return back # Check and try to fix site files integrity @@ -483,7 +530,7 @@ class SiteStorage(object): s = time.time() res = self.verifyFiles( quick_check, - add_optional=self.site.isDownloadable(""), + add_optional=True, add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned ) bad_files = res["bad_files"] @@ -494,58 +541,96 @@ class 
SiteStorage(object): self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check)) # Delete site's all file + @thread_pool_fs_batch.wrap def deleteFiles(self): - self.log.debug("Deleting files from content.json...") + site_title = self.site.content_manager.contents.get("content.json", {}).get("title", self.site.address) + message_id = "delete-%s" % self.site.address + self.log.debug("Deleting files from content.json (title: %s)..." % site_title) + files = [] # Get filenames - for content_inner_path in self.site.content_manager.contents.keys(): + content_inner_paths = list(self.site.content_manager.contents.keys()) + for i, content_inner_path in enumerate(content_inner_paths): content = self.site.content_manager.contents.get(content_inner_path, {}) files.append(content_inner_path) # Add normal files - for file_relative_path in content.get("files", {}).keys(): + for file_relative_path in list(content.get("files", {}).keys()): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir files.append(file_inner_path) # Add optional files - for file_relative_path in content.get("files_optional", {}).keys(): + for file_relative_path in list(content.get("files_optional", {}).keys()): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir files.append(file_inner_path) + if i % 100 == 0: + num_files = len(files) + self.site.messageWebsocket( + _("Deleting site {site_title}...
    Collected {num_files} files"), + message_id, (i / len(content_inner_paths)) * 25 + ) + if self.isFile("dbschema.json"): self.log.debug("Deleting db file...") - self.closeDb() + self.closeDb("Deleting site") self.has_db = False try: schema = self.loadJson("dbschema.json") db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path): os.unlink(db_path) - except Exception, err: + except Exception as err: self.log.error("Db file delete error: %s" % err) - for inner_path in files: + num_files = len(files) + for i, inner_path in enumerate(files): path = self.getPath(inner_path) if os.path.isfile(path): for retry in range(5): try: os.unlink(path) break - except Exception, err: - self.log.error(u"Error removing %s: %s, try #%s" % (inner_path, err, retry)) + except Exception as err: + self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry)) time.sleep(float(retry) / 10) + if i % 100 == 0: + self.site.messageWebsocket( + _("Deleting site {site_title}...
    Deleting file {i}/{num_files}"), + message_id, 25 + (i / num_files) * 50 + ) self.onUpdated(inner_path, False) self.log.debug("Deleting empty dirs...") + i = 0 for root, dirs, files in os.walk(self.directory, topdown=False): for dir in dirs: path = os.path.join(root, dir) - if os.path.isdir(path) and os.listdir(path) == []: - os.rmdir(path) - self.log.debug("Removing %s" % path) + if os.path.isdir(path): + try: + i += 1 + if i % 100 == 0: + self.site.messageWebsocket( + _("Deleting site {site_title}...
    Deleting empty directories {i}"), + message_id, 85 + ) + os.rmdir(path) + except OSError: # Not empty + pass + if os.path.isdir(self.directory) and os.listdir(self.directory) == []: os.rmdir(self.directory) # Remove sites directory if empty if os.path.isdir(self.directory): self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory) + self.site.messageWebsocket( + _("Deleting site {site_title}...
    Site deleted, but some unknown files left in the directory"), + message_id, 100 + ) return False # Some files not deleted else: - self.log.debug("Site data directory deleted: %s..." % self.directory) + self.log.debug("Site %s data directory deleted: %s..." % (site_title, self.directory)) + + self.site.messageWebsocket( + _("Deleting site {site_title}...
    All files deleted successfully"), + message_id, 100 + ) + return True # All clean diff --git a/src/Site/__init__.py b/src/Site/__init__.py index 07a21d40..e69de29b 100644 --- a/src/Site/__init__.py +++ b/src/Site/__init__.py @@ -1,3 +0,0 @@ -from Site import Site -from SiteStorage import SiteStorage -from SiteAnnouncer import SiteAnnouncer diff --git a/src/Test/BenchmarkSsl.py b/src/Test/BenchmarkSsl.py index 06b18537..06181b89 100644 --- a/src/Test/BenchmarkSsl.py +++ b/src/Test/BenchmarkSsl.py @@ -8,7 +8,7 @@ import socket import ssl sys.path.append(os.path.abspath("..")) # Imports relative to src dir -import cStringIO as StringIO +import io as StringIO import gevent from gevent.server import StreamServer @@ -46,8 +46,8 @@ def handle(sock_raw, addr): ) else: sock.sendall(data) - except Exception, err: - print err + except Exception as err: + print(err) try: sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -102,7 +102,7 @@ def getData(): total_num += 1 total_bytes += buff.tell() if not data: - print "No data" + print("No data") sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -119,8 +119,8 @@ def info(): else: memory_info = process.get_memory_info while 1: - print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, - print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20) + print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ') + print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)) time.sleep(1) gevent.spawn(info) @@ -132,7 +132,7 @@ for test in range(1): gevent.joinall(clients) -print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s +print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s) # Separate client/server process: # 10*10*100: diff --git a/src/Test/Spy.py b/src/Test/Spy.py index 8d8f1800..44422550 100644 --- a/src/Test/Spy.py +++ b/src/Test/Spy.py @@ -1,20 +1,23 @@ +import logging + class Spy: def __init__(self, obj, func_name): self.obj = obj - self.func_name = func_name + self.__name__ = func_name self.func_original = getattr(self.obj, func_name) self.calls = [] def __enter__(self, *args, **kwargs): + logging.debug("Spy started") def loggedFunc(cls, *args, **kwargs): call = dict(enumerate(args, 1)) call[0] = cls call.update(kwargs) - print "Logging", call + logging.debug("Spy call: %s" % call) self.calls.append(call) return self.func_original(cls, *args, **kwargs) - setattr(self.obj, self.func_name, loggedFunc) + setattr(self.obj, self.__name__, loggedFunc) return self.calls def __exit__(self, *args, **kwargs): - setattr(self.obj, self.func_name, self.func_original) \ No newline at end of file + setattr(self.obj, self.__name__, self.func_original) \ No newline at end of file diff --git a/src/Test/TestCached.py b/src/Test/TestCached.py new file mode 100644 index 00000000..088962c0 --- /dev/null +++ b/src/Test/TestCached.py @@ -0,0 +1,59 @@ +import time + +from util import Cached + + +class CachedObject: + def __init__(self): + self.num_called_add = 0 + self.num_called_multiply = 0 + self.num_called_none = 0 + + @Cached(timeout=1) + def calcAdd(self, a, b): + self.num_called_add += 1 + return a + b + + @Cached(timeout=1) + def calcMultiply(self, a, b): + self.num_called_multiply += 1 + return a * b + + @Cached(timeout=1) + def none(self): + self.num_called_none += 1 + return None + + +class TestCached: + def testNoneValue(self): + cached_object = CachedObject() + assert cached_object.none() is None + 
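
# The @Cached(timeout=1) decorator exercised by these tests memoizes a method's
# return value per argument tuple, keeps even a None result, and recomputes only
# after the timeout has passed (see the num_called_* assertions). A minimal
# sketch of such a decorator under those assumptions (illustration only, not
# ZeroNet's util.Cached implementation; `cached` and its cache dict are
# illustrative names):

import time
import functools


def cached(timeout):
    def decorator(func):
        cache = {}  # (args, sorted kwargs) -> (expiry timestamp, stored result)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            hit = cache.get(key)
            if hit and hit[0] > time.time():
                return hit[1]  # Fresh entry: return it, even if it is None
            result = func(*args, **kwargs)
            cache[key] = (time.time() + timeout, result)
            return result

        return wrapper
    return decorator
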
assert cached_object.none() is None + assert cached_object.num_called_none == 1 + time.sleep(2) + assert cached_object.none() is None + assert cached_object.num_called_none == 2 + + def testCall(self): + cached_object = CachedObject() + + assert cached_object.calcAdd(1, 2) == 3 + assert cached_object.calcAdd(1, 2) == 3 + assert cached_object.calcMultiply(1, 2) == 2 + assert cached_object.calcMultiply(1, 2) == 2 + assert cached_object.num_called_add == 1 + assert cached_object.num_called_multiply == 1 + + assert cached_object.calcAdd(2, 3) == 5 + assert cached_object.calcAdd(2, 3) == 5 + assert cached_object.num_called_add == 2 + + assert cached_object.calcAdd(1, 2) == 3 + assert cached_object.calcMultiply(2, 3) == 6 + assert cached_object.num_called_add == 2 + assert cached_object.num_called_multiply == 2 + + time.sleep(2) + assert cached_object.calcAdd(1, 2) == 3 + assert cached_object.num_called_add == 3 diff --git a/src/Test/TestConnectionServer.py b/src/Test/TestConnectionServer.py index b17d3f76..82ee605c 100644 --- a/src/Test/TestConnectionServer.py +++ b/src/Test/TestConnectionServer.py @@ -45,10 +45,11 @@ class TestConnection: # Close connection - connection.close() + connection.close("Test ended") client.stop() - time.sleep(0.01) + time.sleep(0.1) assert len(file_server.connections) == 0 + assert file_server.num_incoming == 2 # One for file_server fixture, one for the test def testRawConnection(self, file_server): client = ConnectionServer(file_server.ip, 1545) diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py index e4afb91e..7e7ca1a5 100644 --- a/src/Test/TestContent.py +++ b/src/Test/TestContent.py @@ -1,6 +1,6 @@ import json import time -from cStringIO import StringIO +import io import pytest @@ -38,8 +38,9 @@ class TestContent: # Valid signers for root content.json assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] - def testInlcudeLimits(self, site): + def testInlcudeLimits(self, site, crypt_bitcoin_lib): # Data validation + res = [] data_dict = { "files": { "data.json": { @@ -51,19 +52,21 @@ class TestContent: } # Normal data - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} + data_json = json.dumps(data_dict).encode() + data = io.BytesIO(data_json) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) + # Reset del data_dict["signs"] # Too large data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Include too large" in str(err) + assert "Include too large" in str(err.value) # Reset data_dict["files"]["data.json"]["size"] = 505 @@ -71,19 +74,19 @@ class TestContent: # Not allowed file data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"] - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": 
CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "File not allowed" in str(err) + assert "File not allowed" in str(err.value) # Reset del data_dict["files"]["notallowed.exe"] del data_dict["signs"] # Should work again - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} - data = StringIO(json.dumps(data_dict)) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} + data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"]) @@ -91,7 +94,7 @@ class TestContent: # Bad privatekey with pytest.raises(SignError) as err: site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) - assert "Private key invalid" in str(err) + assert "Private key invalid" in str(err.value) # Good privatekey content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False) @@ -149,10 +152,10 @@ class TestContent: assert "sha512" in file_info_optional assert file_info_optional["optional"] is True - def testVerify(self, site): + def testVerify(self, site, crypt_bitcoin_lib): inner_path = "data/test_include/content.json" data_dict = site.storage.loadJson(inner_path) - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode("utf8")) # Re-sign data_dict["signs"] = { @@ -166,10 +169,10 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong site address" in str(err) + assert "Wrong site address" in str(err.value) # Wrong inner_path data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" @@ -178,10 +181,10 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong inner_path" in str(err) + assert "Wrong inner_path" in str(err.value) # Everything right again data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" @@ -190,14 +193,14 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) - def testVerifyInnerPath(self, site): + def testVerifyInnerPath(self, site, crypt_bitcoin_lib): 
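
# These tests consistently replace cStringIO.StringIO(json.dumps(...)) with
# io.BytesIO(json.dumps(...).encode()): on Python 3 json.dumps() returns str,
# while verifyFile() consumes the payload the same way as files opened with
# "rb", so the in-memory buffer has to hold bytes. A small standalone
# illustration of the difference (the payload content here is arbitrary):

import io
import json

payload = json.dumps({"files": {"data.json": {"size": 505}}}, sort_keys=True)

text_buffer = io.StringIO(payload)                  # holds str, .read() -> str
bytes_buffer = io.BytesIO(payload.encode("utf8"))   # holds bytes, like "rb"

assert isinstance(text_buffer.read(), str)
assert isinstance(bytes_buffer.read(), bytes)
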
inner_path = "content.json" data_dict = site.storage.loadJson(inner_path) - for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg"]: + for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "árvzítűrő/tükörfúrógép.txt"]: data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} if "sign" in data_dict: @@ -206,10 +209,10 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) - for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg"]: + for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]: data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} if "sign" in data_dict: @@ -218,28 +221,53 @@ class TestContent: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Invalid relative path" in str(err) + assert "Invalid relative path" in str(err.value) @pytest.mark.parametrize("key", ["ignore", "optional"]) def testSignUnsafePattern(self, site, key): site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*" with pytest.raises(UnsafePatternError) as err: site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False) - assert "Potentially unsafe" in str(err) + assert "Potentially unsafe" in str(err.value) - def testVerifyUnsafePattern(self, site): + def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib): site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*" with pytest.raises(UnsafePatternError) as err: with site.storage.open("data/test_include/content.json") as data: site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err) + assert "Potentially unsafe" in str(err.value) site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0} with pytest.raises(UnsafePatternError) as err: with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data: site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err) + assert "Potentially unsafe" in str(err.value) + + def testPathValidation(self, site): + assert site.content_manager.isValidRelativePath("test.txt") + assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt") + assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt") + assert site.content_manager.isValidRelativePath("тест.текст") + assert site.content_manager.isValidRelativePath("𝐮𝐧𝐢𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆") + + # Test rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names + + assert not 
site.content_manager.isValidRelativePath("any\\hello.txt") # \ not allowed + assert not site.content_manager.isValidRelativePath("/hello.txt") # Cannot start with / + assert not site.content_manager.isValidRelativePath("\\hello.txt") # Cannot start with \ + assert not site.content_manager.isValidRelativePath("../hello.txt") # Not allowed .. in path + assert not site.content_manager.isValidRelativePath("\0hello.txt") # NULL character + assert not site.content_manager.isValidRelativePath("\31hello.txt") # 0-31 (ASCII control characters) + assert not site.content_manager.isValidRelativePath("any/hello.txt ") # Cannot end with space + assert not site.content_manager.isValidRelativePath("any/hello.txt.") # Cannot end with dot + assert site.content_manager.isValidRelativePath(".hello.txt") # Allow start with dot + assert not site.content_manager.isValidRelativePath("any/CON") # Protected names on Windows + assert not site.content_manager.isValidRelativePath("CON/any.txt") + assert not site.content_manager.isValidRelativePath("any/lpt1.txt") + assert site.content_manager.isValidRelativePath("any/CONAN") + assert not site.content_manager.isValidRelativePath("any/CONOUT$") + assert not site.content_manager.isValidRelativePath("a" * 256) # Max 255 characters allowed diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py index 46d8bdef..8e91dd3e 100644 --- a/src/Test/TestContentUser.py +++ b/src/Test/TestContentUser.py @@ -1,5 +1,5 @@ import json -from cStringIO import StringIO +import io import pytest @@ -91,7 +91,7 @@ class TestContentUser: data_dict = site.storage.loadJson(user_inner_path) users_content = site.content_manager.contents["data/users/content.json"] - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) # Test error on 15k data.json @@ -100,10 +100,10 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err) + assert "Include too large" in str(err.value) # Give more space based on address users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} @@ -111,7 +111,7 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) def testVerify(self, site): @@ -120,7 +120,7 @@ class TestContentUser: data_dict = site.storage.loadJson(user_inner_path) users_content = site.content_manager.contents["data/users/content.json"] - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) # Test max size exception by setting allowed to 0 @@ -131,11 +131,11 @@ class TestContentUser: users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0 rules = site.content_manager.getRules(user_inner_path, data_dict) assert rules["max_size"] == 0 - data = StringIO(json.dumps(data_dict)) + data = 
io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err) + assert "Include too large" in str(err.value) users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000 # Reset # Test max optional size exception @@ -145,7 +145,7 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) # 100 MB gif = Not allowed @@ -154,10 +154,10 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include optional files too large" in str(err) + assert "Include optional files too large" in str(err.value) data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024 # Reset # hello.exe = Not allowed @@ -166,10 +166,10 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Optional file not allowed" in str(err) + assert "Optional file not allowed" in str(err.value) del data_dict["files_optional"]["hello.exe"] # Reset # Includes not allowed in user content @@ -178,10 +178,10 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = StringIO(json.dumps(data_dict)) + data = io.BytesIO(json.dumps(data_dict).encode()) with pytest.raises(VerifyError) as err: site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Includes not allowed" in str(err) + assert "Includes not allowed" in str(err.value) def testCert(self, site): # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" @@ -227,7 +227,7 @@ class TestContentUser: # Test user cert assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) # Test banned user @@ -236,9 +236,9 @@ class TestContentUser: with pytest.raises(VerifyError) as err: site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Valid signs: 0/1" in str(err) + assert "Valid signs: 0/1" in str(err.value) del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] # Reset # Test invalid cert @@ -251,9 +251,9 @@ class TestContentUser: with pytest.raises(VerifyError) as err: site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), 
ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid cert" in str(err) + assert "Invalid cert" in str(err.value) # Test banned user, signed by the site owner user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( @@ -270,7 +270,7 @@ class TestContentUser: } assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(user_content)), ignore_same=False + io.BytesIO(json.dumps(user_content).encode()), ignore_same=False ) def testMissingCert(self, site): @@ -297,7 +297,7 @@ class TestContentUser: assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) # Test invalid cert_user_id @@ -311,9 +311,9 @@ class TestContentUser: with pytest.raises(VerifyError) as err: site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid domain in cert_user_id" in str(err) + assert "Invalid domain in cert_user_id" in str(err.value) # Test removed cert del user_content["cert_user_id"] @@ -328,9 +328,9 @@ class TestContentUser: with pytest.raises(VerifyError) as err: site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Missing cert_user_id" in str(err) + assert "Missing cert_user_id" in str(err.value) def testCertSignersPattern(self, site): @@ -355,7 +355,7 @@ class TestContentUser: assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) # Cert does not matches the pattern @@ -364,9 +364,9 @@ class TestContentUser: with pytest.raises(VerifyError) as err: site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err) + assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) # Removed cert_signers_pattern del rules_content["user_contents"]["cert_signers_pattern"] @@ -374,9 +374,9 @@ class TestContentUser: with pytest.raises(VerifyError) as err: site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False + io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err) + assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) def testNewFile(self, site): diff --git a/src/Test/TestCryptBitcoin.py b/src/Test/TestCryptBitcoin.py index a6009679..2bc087b5 100644 --- a/src/Test/TestCryptBitcoin.py +++ b/src/Test/TestCryptBitcoin.py @@ -2,48 +2,37 @@ from Crypt import CryptBitcoin class TestCryptBitcoin: - def testSignOld(self): - privatekey = "23DKQpDz7bXM7w5KN5Wnmz7bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf" - 
privatekey_bad = "23DKQpDz7bXM7w5KN5Wnmz6bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf" - - # Get address by privatekey - address = CryptBitcoin.privatekeyToAddress(privatekey) - assert address == "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez" - - address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad) - assert not address_bad == "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez" - - # Text signing - sign = CryptBitcoin.signOld("hello", privatekey) - assert CryptBitcoin.verify("hello", address, sign) # Original text - assert not CryptBitcoin.verify("not hello", address, sign) # Different text - - # Signed by bad privatekey - sign_bad = CryptBitcoin.signOld("hello", privatekey_bad) - assert not CryptBitcoin.verify("hello", address, sign_bad) - - def testSign(self): + def testSign(self, crypt_bitcoin_lib): privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C" privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC" # Get address by privatekey - address = CryptBitcoin.privatekeyToAddress(privatekey) + address = crypt_bitcoin_lib.privatekeyToAddress(privatekey) assert address == "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz" - address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad) + address_bad = crypt_bitcoin_lib.privatekeyToAddress(privatekey_bad) assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz" # Text signing - for pad_len in range(0, 300, 10): - pad = pad_len * "!" - sign = CryptBitcoin.sign("hello" + pad, privatekey) + data_len_list = list(range(0, 300, 10)) + data_len_list += [1024, 2048, 1024 * 128, 1024 * 1024, 1024 * 2048] + for data_len in data_len_list: + data = data_len * "!" + sign = crypt_bitcoin_lib.sign(data, privatekey) - assert CryptBitcoin.verify("hello" + pad, address, sign) - assert not CryptBitcoin.verify("not hello" + pad, address, sign) + assert crypt_bitcoin_lib.verify(data, address, sign) + assert not crypt_bitcoin_lib.verify("invalid" + data, address, sign) # Signed by bad privatekey - sign_bad = CryptBitcoin.sign("hello", privatekey_bad) - assert not CryptBitcoin.verify("hello", address, sign_bad) + sign_bad = crypt_bitcoin_lib.sign("hello", privatekey_bad) + assert not crypt_bitcoin_lib.verify("hello", address, sign_bad) + + def testVerify(self, crypt_bitcoin_lib): + sign_uncompressed = b'G6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0=' + assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "19Bir5zRm1yo4pw9uuxQL8xwf9b7jqMpR", sign_uncompressed) + + sign_compressed = b'H6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0=' + assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "1KH5BdNnqxh2KRWMMT8wUXzUgz4vVQ4S8p", sign_compressed) def testNewPrivatekey(self): assert CryptBitcoin.newPrivatekey() != CryptBitcoin.newPrivatekey() diff --git a/src/Test/TestCryptHash.py b/src/Test/TestCryptHash.py new file mode 100644 index 00000000..b91dbcca --- /dev/null +++ b/src/Test/TestCryptHash.py @@ -0,0 +1,31 @@ +import base64 + +from Crypt import CryptHash + +sha512t_sum_hex = "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d" +sha512t_sum_bin = b".\x94f\xd8\xaa\x1f4\x0c\x91 ;M\xdb\xe9\xb6f\x98yaj\x1b\x8e\x95q\x05\x8at\x19Y7Y\x8d" +sha256_sum_hex = "340cd04be7f530e3a7c1bc7b24f225ba5762ec7063a56e1ae01a30d56722e5c3" + + +class TestCryptBitcoin: + + def testSha(self, site): + file_path = site.storage.getPath("dbschema.json") + assert CryptHash.sha512sum(file_path) == sha512t_sum_hex + assert 
CryptHash.sha512sum(open(file_path, "rb")) == sha512t_sum_hex + assert CryptHash.sha512sum(open(file_path, "rb"), format="digest") == sha512t_sum_bin + + assert CryptHash.sha256sum(file_path) == sha256_sum_hex + assert CryptHash.sha256sum(open(file_path, "rb")) == sha256_sum_hex + + with open(file_path, "rb") as f: + hash = CryptHash.Sha512t(f.read(100)) + hash.hexdigest() != sha512t_sum_hex + hash.update(f.read(1024 * 1024)) + assert hash.hexdigest() == sha512t_sum_hex + + def testRandom(self): + assert len(CryptHash.random(64)) == 64 + assert CryptHash.random() != CryptHash.random() + assert bytes.fromhex(CryptHash.random(encoding="hex")) + assert base64.b64decode(CryptHash.random(encoding="base64")) diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py index d821fe5d..67f383a3 100644 --- a/src/Test/TestDb.py +++ b/src/Test/TestDb.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +import io class TestDb: @@ -63,11 +63,11 @@ class TestDb: # Large ammount of IN values assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__test_id": range(2, 3000)} + {"not__test_id": list(range(2, 3000))} ).fetchone()["num"] == 2 assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE ?", - {"test_id": range(50, 3000)} + {"test_id": list(range(50, 3000))} ).fetchone()["num"] == 50 assert db.execute( @@ -75,6 +75,11 @@ class TestDb: {"not__title": ["Test #%s" % i for i in range(50, 3000)]} ).fetchone()["num"] == 50 + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE ?", + {"title__like": "%20%"} + ).fetchone()["num"] == 1 + # Test named parameter escaping assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike", @@ -103,14 +108,14 @@ class TestDb: def testUpdateJson(self, db): - f = StringIO.StringIO() + f = io.BytesIO() f.write(""" { "test": [ {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"} ] } - """) + """.encode()) f.seek(0) assert db.updateJson(db.db_dir + "data.json", f) is True assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1 @@ -118,7 +123,7 @@ class TestDb: def testUnsafePattern(self, db): db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . 
supported - f = StringIO.StringIO() + f = io.StringIO() f.write(""" { "test": [ diff --git a/src/Test/TestDbQuery.py b/src/Test/TestDbQuery.py index 214704a4..597bc950 100644 --- a/src/Test/TestDbQuery.py +++ b/src/Test/TestDbQuery.py @@ -1,6 +1,6 @@ import re -from Db import DbQuery +from Db.DbQuery import DbQuery class TestDbQuery: diff --git a/src/Test/TestDebug.py b/src/Test/TestDebug.py new file mode 100644 index 00000000..e3eb20b3 --- /dev/null +++ b/src/Test/TestDebug.py @@ -0,0 +1,52 @@ +from Debug import Debug +import gevent +import os +import re + +import pytest + + +class TestDebug: + @pytest.mark.parametrize("items,expected", [ + (["@/src/A/B/C.py:17"], ["A/B/C.py line 17"]), # basic test + (["@/src/Db/Db.py:17"], ["Db.py line 17"]), # path compression + (["%s:1" % __file__], ["TestDebug.py line 1"]), + (["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]), # plugins + (["@/main.py:17"], ["main.py line 17"]), # root + (["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]), # Windows paths + ([":1"], []), # importlib builtins + ([":1"], []), # importlib builtins + (["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]), # best-effort anonymization + (["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]), + (["/root/main.py:17"], ["/root/main.py line 17"]), + (["{gevent}:13"], ["/__init__.py line 13"]), # modules + (["{os}:13"], [" line 13"]), # python builtin modules + (["src/gevent/event.py:17"], ["/event.py line 17"]), # gevent-overriden __file__ + (["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]), # mutliple args + (["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]), # same file + (["{os}:1", "@/src/Db/Db.py:17"], [" line 1", "Db.py line 17"]), # builtins + (["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["/__init__.py line 1", "...", "Db.py line 17"]) + ]) + def testFormatTraceback(self, items, expected): + q_items = [] + for item in items: + file, line = item.rsplit(":", 1) + if file.startswith("@"): + file = Debug.root_dir + file[1:] + file = file.replace("{os}", os.__file__) + file = file.replace("{gevent}", gevent.__file__) + q_items.append((file, int(line))) + assert Debug.formatTraceback(q_items) == expected + + def testFormatException(self): + try: + raise ValueError("Test exception") + except Exception: + assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException()) + try: + os.path.abspath(1) + except Exception: + assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException()) + + def testFormatStack(self): + assert re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack()) diff --git a/src/Test/TestDiff.py b/src/Test/TestDiff.py index 0e387e2a..622951a1 100644 --- a/src/Test/TestDiff.py +++ b/src/Test/TestDiff.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +import io from util import Diff @@ -30,20 +30,26 @@ class TestDiff: [] ) == [("-", 11)] + def testUtf8(self): + assert Diff.diff( + ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three"], + ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three", "four", "five"] + ) == [("=", 20), ("+", ["four", "five"])] + def testDiffLimit(self): - old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") - new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix") + old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") + new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") actions = 
Diff.diff(list(old_f), list(new_f), limit=1024) assert actions - old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") - new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix"*1024) + old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") + new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024) actions = Diff.diff(list(old_f), list(new_f), limit=1024) assert actions is False def testPatch(self): - old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") - new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix") + old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") + new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") actions = Diff.diff( list(old_f), list(new_f) diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py index 5210ce82..3fabc271 100644 --- a/src/Test/TestFileRequest.py +++ b/src/Test/TestFileRequest.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +import io import pytest import time @@ -20,10 +20,10 @@ class TestFileRequest: # Normal request response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0}) - assert "sign" in response["body"] + assert b"sign" in response["body"] response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")}) - assert "sign" in response["body"] + assert b"sign" in response["body"] # Invalid file response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}) @@ -35,7 +35,7 @@ class TestFileRequest: # Stream from parent dir response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0}) - assert "File read error" in response["error"] + assert "File read exception" in response["error"] # Invalid site response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0}) @@ -48,6 +48,12 @@ class TestFileRequest: response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234}) assert "File size does not match" in response["error"] + # Invalid path + for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]: + for sep in ["/", "\\"]: + response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0}) + assert response["error"] == 'File read exception' + connection.close() client.stop() @@ -57,27 +63,27 @@ class TestFileRequest: connection = client.getConnection(file_server.ip, 1544) file_server.sites[site.address] = site - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff) assert "stream_bytes" in response - assert "sign" in buff.getvalue() + assert b"sign" in buff.getvalue() # Invalid file - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff) assert "File read error" in response["error"] # Location over size - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request( "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff ) assert "File read error" in response["error"] # Stream from parent dir - buff = StringIO.StringIO() + buff = io.BytesIO() response = connection.request("streamFile", {"site": site.address, "inner_path": 
"../users.json", "location": 0}, buff) - assert "File read error" in response["error"] + assert "File read exception" in response["error"] connection.close() client.stop() @@ -85,7 +91,7 @@ class TestFileRequest: def testPex(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client connection = client.getConnection(file_server.ip, 1544) diff --git a/src/Test/TestFlag.py b/src/Test/TestFlag.py new file mode 100644 index 00000000..12fd8165 --- /dev/null +++ b/src/Test/TestFlag.py @@ -0,0 +1,39 @@ +import os + +import pytest + +from util.Flag import Flag + +class TestFlag: + def testFlagging(self): + flag = Flag() + @flag.admin + @flag.no_multiuser + def testFn(anything): + return anything + + assert "admin" in flag.db["testFn"] + assert "no_multiuser" in flag.db["testFn"] + + def testSubclassedFlagging(self): + flag = Flag() + class Test: + @flag.admin + @flag.no_multiuser + def testFn(anything): + return anything + + class SubTest(Test): + pass + + assert "admin" in flag.db["testFn"] + assert "no_multiuser" in flag.db["testFn"] + + def testInvalidFlag(self): + flag = Flag() + with pytest.raises(Exception) as err: + @flag.no_multiuser + @flag.unknown_flag + def testFn(anything): + return anything + assert "Invalid flag" in str(err.value) diff --git a/src/Test/TestHelper.py b/src/Test/TestHelper.py index 82b3258a..07644ec0 100644 --- a/src/Test/TestHelper.py +++ b/src/Test/TestHelper.py @@ -1,8 +1,10 @@ import socket import struct +import os import pytest from util import helper +from Config import config @pytest.mark.usefixtures("resetSettings") @@ -22,13 +24,16 @@ class TestHelper: assert len(helper.packAddress(ip, port)) == 18 assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port) - with pytest.raises(struct.error) as err: + assert len(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == 12 + assert helper.unpackOnionAddress(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == ("boot3rdez4rzn36x.onion", port) + + with pytest.raises(struct.error): helper.packAddress("1.1.1.1", 100000) with pytest.raises(socket.error): helper.packAddress("999.1.1.1", 1) - with pytest.raises(AssertionError): + with pytest.raises(Exception): helper.unpackAddress("X") def testGetDirname(self): @@ -39,7 +44,6 @@ class TestHelper: assert helper.getDirname("data/users/") == "data/users/" assert helper.getDirname("/data/users/content.json") == "data/users/" - def testGetFilename(self): assert helper.getFilename("data/users/content.json") == "content.json" assert helper.getFilename("data/users") == "users" @@ -60,3 +64,16 @@ class TestHelper: assert not helper.isPrivateIp("1.1.1.1") assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c") assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c") # cjdns + + def testOpenLocked(self): + locked_f = helper.openLocked(config.data_dir + "/locked.file") + assert locked_f + with pytest.raises(BlockingIOError): + locked_f_again = helper.openLocked(config.data_dir + "/locked.file") + locked_f_different = helper.openLocked(config.data_dir + "/locked_different.file") + + locked_f.close() + locked_f_different.close() + + os.unlink(locked_f.name) + os.unlink(locked_f_different.name) diff --git a/src/Test/TestMsgpack.py b/src/Test/TestMsgpack.py index 3665a0a4..5a0b6d4d 100644 --- a/src/Test/TestMsgpack.py +++ b/src/Test/TestMsgpack.py @@ -1,28 +1,36 
@@ -import cStringIO as StringIO +import io +import os import msgpack import pytest from Config import config -from util import StreamingMsgpack +from util import Msgpack +from collections import OrderedDict class TestMsgpack: - test_data = {"cmd": "fileGet", "params": {"site": "1Site"}} + test_data = OrderedDict( + sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items()) + ) + + def testPacking(self): + assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91' + assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91' def testUnpackinkg(self): - assert msgpack.unpackb(msgpack.packb(self.test_data)) == self.test_data + assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data @pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker]) def testUnpacker(self, unpacker_class): - unpacker = unpacker_class() + unpacker = unpacker_class(raw=False) - data = msgpack.packb(self.test_data) - data += msgpack.packb(self.test_data) + data = msgpack.packb(self.test_data, use_bin_type=True) + data += msgpack.packb(self.test_data, use_bin_type=True) messages = [] for char in data: - unpacker.feed(char) + unpacker.feed(bytes([char])) for message in unpacker: messages.append(message) @@ -31,16 +39,50 @@ class TestMsgpack: assert messages[0] == messages[1] def testStreaming(self): - f = StreamingMsgpack.FilePart("%s/users.json" % config.data_dir) - f.read_bytes = 10 + bin_data = os.urandom(20) + f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb") + f.read_bytes = 30 - data = {"cmd": "response", "params": f} + data = {"cmd": "response", "body": f, "bin": bin_data} - out_buff = StringIO.StringIO() - StreamingMsgpack.stream(data, out_buff.write) + out_buff = io.BytesIO() + Msgpack.stream(data, out_buff.write) out_buff.seek(0) - data_packb = {"cmd": "response", "params": open("%s/users.json" % config.data_dir).read(10)} + data_packb = { + "cmd": "response", + "body": open("%s/users.json" % config.data_dir, "rb").read(30), + "bin": bin_data + } out_buff.seek(0) - assert msgpack.unpackb(out_buff.read()) == data_packb + data_unpacked = Msgpack.unpack(out_buff.read()) + assert data_unpacked == data_packb + assert data_unpacked["cmd"] == "response" + assert type(data_unpacked["body"]) == bytes + + def testBackwardCompatibility(self): + packed = {} + packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False) + packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True) + for key, val in packed.items(): + unpacked = Msgpack.unpack(val) + type(unpacked["utf8"]) == str + type(unpacked["bin"]) == bytes + + # Packed with use_bin_type=False (pre-ZeroNet 0.7.0) + unpacked = Msgpack.unpack(packed["py3"], decode=True) + type(unpacked["utf8"]) == str + type(unpacked["bin"]) == bytes + assert len(unpacked["utf8"]) == 9 + assert len(unpacked["bin"]) == 10 + with pytest.raises(UnicodeDecodeError) as err: # Try to decode binary as utf-8 + unpacked = Msgpack.unpack(packed["py3"], decode=False) + + # Packed with 
use_bin_type=True + unpacked = Msgpack.unpack(packed["py3_bin"], decode=False) + type(unpacked["utf8"]) == str + type(unpacked["bin"]) == bytes + assert len(unpacked["utf8"]) == 9 + assert len(unpacked["bin"]) == 10 + diff --git a/src/Test/TestNoparallel.py b/src/Test/TestNoparallel.py index 5c0242f5..6fc4f57d 100644 --- a/src/Test/TestNoparallel.py +++ b/src/Test/TestNoparallel.py @@ -1,7 +1,19 @@ import time -import util import gevent +import pytest + +import util +from util import ThreadPool + + +@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn']) +def queue_spawn(request): + thread_pool = ThreadPool.ThreadPool(10) + if request.param == "gevent.spawn": + return gevent.spawn + else: + return thread_pool.spawn class ExampleClass(object): @@ -11,7 +23,7 @@ class ExampleClass(object): @util.Noparallel() def countBlocking(self, num=5): for i in range(1, num + 1): - time.sleep(0.01) + time.sleep(0.1) self.counted += 1 return "counted:%s" % i @@ -31,20 +43,20 @@ class ExampleClass(object): class TestNoparallel: - def testBlocking(self): + def testBlocking(self, queue_spawn): obj1 = ExampleClass() obj2 = ExampleClass() # Dont allow to call again until its running and wait until its running threads = [ - gevent.spawn(obj1.countBlocking), - gevent.spawn(obj1.countBlocking), - gevent.spawn(obj1.countBlocking), - gevent.spawn(obj2.countBlocking) + queue_spawn(obj1.countBlocking), + queue_spawn(obj1.countBlocking), + queue_spawn(obj1.countBlocking), + queue_spawn(obj2.countBlocking) ] assert obj2.countBlocking() == "counted:5" # The call is ignored as obj2.countBlocking already counting, but block until its finishes gevent.joinall(threads) - assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"] # Check the return value for every call + assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"] obj2.countBlocking() # Allow to call again as obj2.countBlocking finished assert obj1.counted == 5 @@ -52,7 +64,6 @@ class TestNoparallel: def testNoblocking(self): obj1 = ExampleClass() - obj2 = ExampleClass() thread1 = obj1.countNoblocking() thread2 = obj1.countNoblocking() # Ignored @@ -66,24 +77,24 @@ class TestNoparallel: obj1.countNoblocking().join() # Allow again and wait until finishes assert obj1.counted == 10 - def testQueue(self): + def testQueue(self, queue_spawn): obj1 = ExampleClass() - gevent.spawn(obj1.countQueue, num=10) - gevent.spawn(obj1.countQueue, num=10) - gevent.spawn(obj1.countQueue, num=10) + queue_spawn(obj1.countQueue, num=1) + queue_spawn(obj1.countQueue, num=1) + queue_spawn(obj1.countQueue, num=1) - time.sleep(3.0) - assert obj1.counted == 20 # No multi-queue supported + time.sleep(0.3) + assert obj1.counted == 2 # No multi-queue supported obj2 = ExampleClass() - gevent.spawn(obj2.countQueue, num=10) - gevent.spawn(obj2.countQueue, num=10) + queue_spawn(obj2.countQueue, num=10) + queue_spawn(obj2.countQueue, num=10) time.sleep(1.5) # Call 1 finished, call 2 still working assert 10 < obj2.counted < 20 - gevent.spawn(obj2.countQueue, num=10) + queue_spawn(obj2.countQueue, num=10) time.sleep(2.0) assert obj2.counted == 30 @@ -92,29 +103,65 @@ class TestNoparallel: obj1 = ExampleClass() threads = [] - for i in range(10000): + for i in range(1000): thread = gevent.spawn(obj1.countQueue, num=5) threads.append(thread) gevent.joinall(threads) - assert obj1.counted == 5 * 2 # Only called twice + assert obj1.counted == 5 * 2 # Only called twice (no multi-queue allowed) - def 
testIgnoreClass(self): + def testIgnoreClass(self, queue_spawn): obj1 = ExampleClass() obj2 = ExampleClass() threads = [ - gevent.spawn(obj1.countQueue), - gevent.spawn(obj1.countQueue), - gevent.spawn(obj1.countQueue), - gevent.spawn(obj2.countQueue), - gevent.spawn(obj2.countQueue) + queue_spawn(obj1.countQueue), + queue_spawn(obj1.countQueue), + queue_spawn(obj1.countQueue), + queue_spawn(obj2.countQueue), + queue_spawn(obj2.countQueue) ] s = time.time() + time.sleep(0.001) gevent.joinall(threads) - # Queue limited to 2 calls (very call takes counts to 5 and takes 0.05 sec) + # Queue limited to 2 calls (every call takes counts to 5 and takes 0.05 sec) assert obj1.counted + obj2.counted == 10 taken = time.time() - s - assert 1.1 > taken >= 1.0 # 2 * 0.5s count = ~1s + assert 1.2 > taken >= 1.0 # 2 * 0.5s count = ~1s + + def testException(self, queue_spawn): + class MyException(Exception): + pass + + @util.Noparallel() + def raiseException(): + raise MyException("Test error!") + + with pytest.raises(MyException) as err: + raiseException() + assert str(err.value) == "Test error!" + + with pytest.raises(MyException) as err: + queue_spawn(raiseException).get() + assert str(err.value) == "Test error!" + + def testMultithreadMix(self, queue_spawn): + obj1 = ExampleClass() + with ThreadPool.ThreadPool(10) as thread_pool: + s = time.time() + t1 = queue_spawn(obj1.countBlocking, 5) + time.sleep(0.01) + t2 = thread_pool.spawn(obj1.countBlocking, 5) + time.sleep(0.01) + t3 = thread_pool.spawn(obj1.countBlocking, 5) + time.sleep(0.3) + t4 = gevent.spawn(obj1.countBlocking, 5) + threads = [t1, t2, t3, t4] + for thread in threads: + assert thread.get() == "counted:5" + + time_taken = time.time() - s + assert obj1.counted == 5 + assert 0.5 < time_taken < 0.7 diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py index 32ca1323..f57e046e 100644 --- a/src/Test/TestPeer.py +++ b/src/Test/TestPeer.py @@ -1,12 +1,12 @@ import time -from cStringIO import StringIO +import io import pytest from File import FileServer from File import FileRequest from Crypt import CryptHash -import Spy +from . 
import Spy @pytest.mark.usefixtures("resetSettings") @@ -15,7 +15,7 @@ class TestPeer: def testPing(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client connection = client.getConnection(file_server.ip, 1544) @@ -34,7 +34,7 @@ class TestPeer: def testDownloadFile(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client connection = client.getConnection(file_server.ip, 1544) @@ -43,17 +43,17 @@ class TestPeer: # Testing streamFile buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True) - assert "sign" in buff.getvalue() + assert b"sign" in buff.getvalue() # Testing getFile buff = peer_file_server.getFile(site_temp.address, "content.json") - assert "sign" in buff.getvalue() + assert b"sign" in buff.getvalue() connection.close() client.stop() def testHashfield(self, site): - sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"] + sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"] site.storage.verifyFiles(quick_check=True) # Find what optional files we have @@ -65,7 +65,7 @@ class TestPeer: assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield # Add new hash - new_hash = CryptHash.sha512sum(StringIO("hello")) + new_hash = CryptHash.sha512sum(io.BytesIO(b"hello")) assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield assert site.content_manager.hashfield.appendHash(new_hash) assert not site.content_manager.hashfield.appendHash(new_hash) # Don't add second time @@ -129,7 +129,7 @@ class TestPeer: def testFindHash(self, file_server, site, site_temp): file_server.sites[site.address] = site client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client # Add file_server as peer to client diff --git a/src/Test/TestSafeRe.py b/src/Test/TestSafeRe.py index b8037123..429bde50 100644 --- a/src/Test/TestSafeRe.py +++ b/src/Test/TestSafeRe.py @@ -15,10 +15,10 @@ class TestSafeRe: def testUnsafeMatch(self, pattern): with pytest.raises(SafeRe.UnsafePatternError) as err: SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") - assert "Potentially unsafe" in str(err) + assert "Potentially unsafe" in str(err.value) @pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"]) def testUnsafeRepetition(self, pattern): with pytest.raises(SafeRe.UnsafePatternError) as err: SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") - assert "More than" in str(err) + assert "More than" in str(err.value) diff --git a/src/Test/TestSite.py b/src/Test/TestSite.py index b9a40064..05bb2ed9 100644 --- a/src/Test/TestSite.py +++ b/src/Test/TestSite.py @@ -4,16 +4,17 @@ import os import pytest from Site import SiteManager +TEST_DATA_PATH = "src/Test/testdata" @pytest.mark.usefixtures("resetSettings") class TestSite: def testClone(self, site): - assert site.storage.directory == "src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" + assert 
site.storage.directory == TEST_DATA_PATH + "/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" # Remove old files - if os.path.isdir("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"): - shutil.rmtree("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") - assert not os.path.isfile("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json") + if os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"): + shutil.rmtree(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") + assert not os.path.isfile(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json") # Clone 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT to 15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc new_site = site.clone( @@ -36,8 +37,8 @@ class TestSite: # Test re-cloning (updating) # Changes in non-data files should be overwritten - new_site.storage.write("index.html", "this will be overwritten") - assert new_site.storage.read("index.html") == "this will be overwritten" + new_site.storage.write("index.html", b"this will be overwritten") + assert new_site.storage.read("index.html") == b"this will be overwritten" # Changes in data file should be kept after re-cloning changed_contentjson = new_site.storage.loadJson("content.json") @@ -61,7 +62,7 @@ class TestSite: # Delete created files new_site.storage.deleteFiles() - assert not os.path.isdir("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") + assert not os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL") # Delete from site registry assert new_site.address in SiteManager.site_manager.sites diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py index 842cfc82..cd0a4c9f 100644 --- a/src/Test/TestSiteDownload.py +++ b/src/Test/TestSiteDownload.py @@ -3,19 +3,21 @@ import time import pytest import mock import gevent +import gevent.event +import os from Connection import ConnectionServer from Config import config from File import FileRequest from File import FileServer -from Site import Site -import Spy +from Site.Site import Site +from . 
import Spy @pytest.mark.usefixtures("resetTempSettings") @pytest.mark.usefixtures("resetSettings") class TestSiteDownload: - def testDownload(self, file_server, site, site_temp): + def testRename(self, file_server, site, site_temp): assert site.storage.directory == config.data_dir + "/" + site.address assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address @@ -24,37 +26,107 @@ class TestSiteDownload: file_server.sites[site.address] = site # Init client server - client = ConnectionServer(file_server.ip, 1545) + client = FileServer(file_server.ip, 1545) + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net + site_temp.addPeer(file_server.ip, 1544) + + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + + assert site_temp.storage.isFile("content.json") + + # Rename non-optional file + os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png")) + + site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") + + content = site.storage.loadJson("content.json") + assert "data/img/domain-new.png" in content["files"] + assert "data/img/domain.png" not in content["files"] + assert not site_temp.storage.isFile("data/img/domain-new.png") + assert site_temp.storage.isFile("data/img/domain.png") + settings_before = site_temp.settings + with Spy.Spy(FileRequest, "route") as requests: - def boostRequest(inner_path): - # I really want these file - if inner_path == "index.html": - site_temp.needFile("data/img/multiuser.png", priority=15, blocking=False) - site_temp.needFile("data/img/direct_domains.png", priority=15, blocking=False) - site_temp.onFileDone.append(boostRequest) - site_temp.download(blind_includes=True).join(timeout=5) - file_requests = [request[3]["inner_path"] for request in requests if request[1] in ("getFile", "streamFile")] - # Test priority - assert file_requests[0:2] == ["content.json", "index.html"] # Must-have files - assert file_requests[2:4] == ["data/img/multiuser.png", "data/img/direct_domains.png"] # Directly requested files - assert file_requests[4:6] == ["css/all.css", "js/all.js"] # Important assets - assert file_requests[6] == "dbschema.json" # Database map - assert "-default" in file_requests[-1] # Put default files for cloning to the end + site.publish() + time.sleep(0.1) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download + assert "streamFile" not in [req[1] for req in requests] - # Check files - bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] + content = site_temp.storage.loadJson("content.json") + assert "data/img/domain-new.png" in content["files"] + assert "data/img/domain.png" not in content["files"] + assert site_temp.storage.isFile("data/img/domain-new.png") + assert not site_temp.storage.isFile("data/img/domain.png") - # -1 because data/users/1J6... 
user has invalid cert - assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1 - assert not bad_files + assert site_temp.settings["size"] == settings_before["size"] + assert site_temp.settings["size_optional"] == settings_before["size_optional"] assert site_temp.storage.deleteFiles() [connection.close() for connection in file_server.connections] + def testRenameOptional(self, file_server, site, site_temp): + assert site.storage.directory == config.data_dir + "/" + site.address + assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = FileServer(file_server.ip, 1545) + client.sites = {site_temp.address: site_temp} + site_temp.connection_server = client + site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net + + + site_temp.addPeer(file_server.ip, 1544) + + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + + assert site_temp.settings["optional_downloaded"] == 0 + + site_temp.needFile("data/optional.txt") + + assert site_temp.settings["optional_downloaded"] > 0 + settings_before = site_temp.settings + hashfield_before = site_temp.content_manager.hashfield.tobytes() + + # Rename optional file + os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt")) + + site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True) + + content = site.storage.loadJson("content.json") + assert "data/optional-new.txt" in content["files_optional"] + assert "data/optional.txt" not in content["files_optional"] + assert not site_temp.storage.isFile("data/optional-new.txt") + assert site_temp.storage.isFile("data/optional.txt") + + with Spy.Spy(FileRequest, "route") as requests: + site.publish() + time.sleep(0.1) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download + assert "streamFile" not in [req[1] for req in requests] + + content = site_temp.storage.loadJson("content.json") + assert "data/optional-new.txt" in content["files_optional"] + assert "data/optional.txt" not in content["files_optional"] + assert site_temp.storage.isFile("data/optional-new.txt") + assert not site_temp.storage.isFile("data/optional.txt") + + assert site_temp.settings["size"] == settings_before["size"] + assert site_temp.settings["size_optional"] == settings_before["size_optional"] + assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"] + assert site_temp.content_manager.hashfield.tobytes() == hashfield_before + + assert site_temp.storage.deleteFiles() + [connection.close() for connection in file_server.connections] + + def testArchivedDownload(self, file_server, site, site_temp): # Init source server site.connection_server = file_server @@ -62,12 +134,12 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client # Download normally site_temp.addPeer(file_server.ip, 1544) - site_temp.download(blind_includes=True).join(timeout=5) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] assert not 
bad_files @@ -76,7 +148,7 @@ class TestSiteDownload: assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2 # Add archived data - assert not "archived" in site.content_manager.contents["data/users/content.json"]["user_contents"] + assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"] assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1) site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()} @@ -91,7 +163,7 @@ class TestSiteDownload: assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] site.publish() time.sleep(0.1) - site_temp.download(blind_includes=True).join(timeout=5) # Wait for download + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download # The archived content should disappear from remote client assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] @@ -110,12 +182,12 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client # Download normally site_temp.addPeer(file_server.ip, 1544) - site_temp.download(blind_includes=True).join(timeout=5) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] assert not bad_files @@ -140,7 +212,7 @@ class TestSiteDownload: assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] site.publish() time.sleep(0.1) - site_temp.download(blind_includes=True).join(timeout=5) # Wait for download + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download # The archived content should disappear from remote client assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] @@ -167,7 +239,7 @@ class TestSiteDownload: site_temp.addPeer(file_server.ip, 1544) # Download site - site_temp.download(blind_includes=True).join(timeout=5) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Download optional data/optional.txt site.storage.verifyFiles(quick_check=True) # Find what optional files we have @@ -232,7 +304,7 @@ class TestSiteDownload: # Download normal files site_temp.log.info("Start Downloading site") - site_temp.download(blind_includes=True).join(timeout=5) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Download optional data/optional.txt optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt") @@ -262,7 +334,7 @@ class TestSiteDownload: assert site_temp.storage.deleteFiles() file_server_full.stop() [connection.close() for connection in file_server.connections] - site_full.content_manager.contents.db.close() + site_full.content_manager.contents.db.close("FindOptional test end") def testUpdate(self, file_server, site, site_temp): assert site.storage.directory == config.data_dir + "/" + site.address @@ -274,7 +346,7 @@ class TestSiteDownload: # Init client server client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: 
site_temp} site_temp.connection_server = client # Don't try to find peers from the net @@ -285,11 +357,12 @@ class TestSiteDownload: site_temp.addPeer(file_server.ip, 1544) # Download site from site to site_temp - site_temp.download(blind_includes=True).join(timeout=5) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + assert len(site_temp.bad_files) == 1 # Update file data_original = site.storage.open("data/data.json").read() - data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"') + data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"') assert data_original != data_new site.storage.open("data/data.json", "wb").write(data_new) @@ -303,19 +376,20 @@ class TestSiteDownload: site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") site.publish() time.sleep(0.1) - site_temp.download(blind_includes=True).join(timeout=5) + site.log.info("Downloading site") + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1 assert site_temp.storage.open("data/data.json").read() == data_new # Close connection to avoid update spam limit - site.peers.values()[0].remove() + list(site.peers.values())[0].remove() site.addPeer(file_server.ip, 1545) - site_temp.peers.values()[0].ping() # Connect back + list(site_temp.peers.values())[0].ping() # Connect back time.sleep(0.1) # Update with patch - data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"') + data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"') assert data_original != data_new site.storage.open("data/data.json-new", "wb").write(data_new) @@ -328,17 +402,161 @@ class TestSiteDownload: assert not site.storage.isFile("data/data.json-new") # New data file removed assert site.storage.open("data/data.json").read() == data_new # -new postfix removed assert "data/data.json" in diffs - assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)] + assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)] # Publish with patch site.log.info("Publish new data.json with patch") with Spy.Spy(FileRequest, "route") as requests: site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") + + event_done = gevent.event.AsyncResult() site.publish(diffs=diffs) - site_temp.download(blind_includes=True).join(timeout=5) - assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 0 + time.sleep(0.1) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + assert [request for request in requests if request[1] in ("getFile", "streamFile")] == [] assert site_temp.storage.open("data/data.json").read() == data_new assert site_temp.storage.deleteFiles() [connection.close() for connection in file_server.connections] + + def testBigUpdate(self, file_server, site, site_temp): + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = FileServer(file_server.ip, 1545) + client.sites = {site_temp.address: site_temp} + site_temp.connection_server = client + + # Connect peers + site_temp.addPeer(file_server.ip, 1544) + + # Download site from site to site_temp + assert site_temp.download(blind_includes=True, 
retry_bad_files=False).get(timeout=10) + assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] + + # Update file + data_original = site.storage.open("data/data.json").read() + data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"') + assert data_original != data_new + + site.storage.open("data/data.json-new", "wb").write(data_new) + + assert site.storage.open("data/data.json-new").read() == data_new + assert site_temp.storage.open("data/data.json").read() != data_new + + # Generate diff + diffs = site.content_manager.getDiffs("content.json") + assert not site.storage.isFile("data/data.json-new") # New data file removed + assert site.storage.open("data/data.json").read() == data_new # -new postfix removed + assert "data/data.json" in diffs + + content_json = site.storage.loadJson("content.json") + content_json["description"] = "BigZeroBlog" * 1024 * 10 + site.storage.writeJson("content.json", content_json) + site.content_manager.loadContent("content.json", force=True) + + # Publish with patch + site.log.info("Publish new data.json with patch") + with Spy.Spy(FileRequest, "route") as requests: + site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") + assert site.storage.getSize("content.json") > 10 * 1024 # Make it a big content.json + site.publish(diffs=diffs) + time.sleep(0.1) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")] + assert len(file_requests) == 1 + + assert site_temp.storage.open("data/data.json").read() == data_new + assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read() + + # Test what happened if the content.json of the site is bigger than the site limit + def testHugeContentSiteUpdate(self, file_server, site, site_temp): + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = FileServer(file_server.ip, 1545) + client.sites = {site_temp.address: site_temp} + site_temp.connection_server = client + + # Connect peers + site_temp.addPeer(file_server.ip, 1544) + + # Download site from site to site_temp + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + site_temp.settings["size_limit"] = int(20 * 1024 *1024) + site_temp.saveSettings() + + # Raise limit size to 20MB on site so it can be signed + site.settings["size_limit"] = int(20 * 1024 *1024) + site.saveSettings() + + content_json = site.storage.loadJson("content.json") + content_json["description"] = "PartirUnJour" * 1024 * 1024 + site.storage.writeJson("content.json", content_json) + changed, deleted = site.content_manager.loadContent("content.json", force=True) + + # Make sure we have 2 differents content.json + assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read() + + # Generate diff + diffs = site.content_manager.getDiffs("content.json") + + # Publish with patch + site.log.info("Publish new content.json bigger than 10MB") + with Spy.Spy(FileRequest, "route") as requests: + site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") + assert site.storage.getSize("content.json") > 10 * 1024 * 1024 # verify it over 10MB + time.sleep(0.1) + site.publish(diffs=diffs) + assert site_temp.download(blind_includes=True, 
retry_bad_files=False).get(timeout=10) + + assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024 + assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read() + + def testUnicodeFilename(self, file_server, site, site_temp): + assert site.storage.directory == config.data_dir + "/" + site.address + assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = FileServer(file_server.ip, 1545) + client.sites = {site_temp.address: site_temp} + site_temp.connection_server = client + site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net + + site_temp.addPeer(file_server.ip, 1544) + + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) + + site.storage.write("data/img/árvíztűrő.png", b"test") + + site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") + + content = site.storage.loadJson("content.json") + assert "data/img/árvíztűrő.png" in content["files"] + assert not site_temp.storage.isFile("data/img/árvíztűrő.png") + settings_before = site_temp.settings + + with Spy.Spy(FileRequest, "route") as requests: + site.publish() + time.sleep(0.1) + assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10) # Wait for download + assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1 + + content = site_temp.storage.loadJson("content.json") + assert "data/img/árvíztűrő.png" in content["files"] + assert site_temp.storage.isFile("data/img/árvíztűrő.png") + + assert site_temp.settings["size"] == settings_before["size"] + assert site_temp.settings["size_optional"] == settings_before["size_optional"] + + assert site_temp.storage.deleteFiles() + [connection.close() for connection in file_server.connections] diff --git a/src/Test/TestSiteStorage.py b/src/Test/TestSiteStorage.py index e9977e8e..f11262bf 100644 --- a/src/Test/TestSiteStorage.py +++ b/src/Test/TestSiteStorage.py @@ -20,3 +20,6 @@ class TestSiteStorage: # Subdir assert set(site.storage.list("data-default")) == set(["data.json", "users"]) + + def testDbRebuild(self, site): + assert site.storage.rebuildDb() diff --git a/src/Test/TestThreadPool.py b/src/Test/TestThreadPool.py new file mode 100644 index 00000000..5e95005e --- /dev/null +++ b/src/Test/TestThreadPool.py @@ -0,0 +1,163 @@ +import time +import threading + +import gevent +import pytest + +from util import ThreadPool + + +class TestThreadPool: + def testExecutionOrder(self): + with ThreadPool.ThreadPool(4) as pool: + events = [] + + @pool.wrap + def blocker(): + events.append("S") + out = 0 + for i in range(10000000): + if i == 3000000: + events.append("M") + out += 1 + events.append("D") + return out + + threads = [] + for i in range(3): + threads.append(gevent.spawn(blocker)) + gevent.joinall(threads) + + assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3 + + res = blocker() + assert res == 10000000 + + def testLockBlockingSameThread(self): + lock = ThreadPool.Lock() + + s = time.time() + + def unlocker(): + time.sleep(1) + lock.release() + + gevent.spawn(unlocker) + lock.acquire(True) + lock.acquire(True, timeout=2) + + unlock_taken = time.time() - s + + assert 1.0 < unlock_taken < 1.5 + + def testLockBlockingDifferentThread(self): + lock = ThreadPool.Lock() + + def locker(): + 
lock.acquire(True) + time.sleep(0.5) + lock.release() + + with ThreadPool.ThreadPool(10) as pool: + threads = [ + pool.spawn(locker), + pool.spawn(locker), + gevent.spawn(locker), + pool.spawn(locker) + ] + time.sleep(0.1) + + s = time.time() + + lock.acquire(True, 5.0) + + unlock_taken = time.time() - s + + assert 1.8 < unlock_taken < 2.2 + + gevent.joinall(threads) + + def testMainLoopCallerThreadId(self): + main_thread_id = threading.current_thread().ident + with ThreadPool.ThreadPool(5) as pool: + def getThreadId(*args, **kwargs): + return threading.current_thread().ident + + t = pool.spawn(getThreadId) + assert t.get() != main_thread_id + + t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId)) + assert t.get() == main_thread_id + + def testMainLoopCallerGeventSpawn(self): + main_thread_id = threading.current_thread().ident + with ThreadPool.ThreadPool(5) as pool: + def waiter(): + time.sleep(1) + return threading.current_thread().ident + + def geventSpawner(): + event = ThreadPool.main_loop.call(gevent.spawn, waiter) + + with pytest.raises(Exception) as greenlet_err: + event.get() + assert str(greenlet_err.value) == "cannot switch to a different thread" + + waiter_thread_id = ThreadPool.main_loop.call(event.get) + return waiter_thread_id + + s = time.time() + waiter_thread_id = pool.apply(geventSpawner) + assert main_thread_id == waiter_thread_id + time_taken = time.time() - s + assert 0.9 < time_taken < 1.2 + + def testEvent(self): + with ThreadPool.ThreadPool(5) as pool: + event = ThreadPool.Event() + + def setter(): + time.sleep(1) + event.set("done!") + + def getter(): + return event.get() + + pool.spawn(setter) + t_gevent = gevent.spawn(getter) + t_pool = pool.spawn(getter) + s = time.time() + assert event.get() == "done!" + time_taken = time.time() - s + gevent.joinall([t_gevent, t_pool]) + + assert t_gevent.get() == "done!" + assert t_pool.get() == "done!" 
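# A usage sketch of the pattern exercised by the tests above: CPU-bound work is handed
# to util.ThreadPool so the gevent loop stays responsive, and results are collected via
# .get(), which cooperates with greenlets. Only calls already exercised in this file
# (ThreadPool as a context manager, pool.spawn, .get) are used; the worker function,
# its input and the pool size are illustrative, not part of the patch.
from util import ThreadPool

def checksum(data):
    # Stand-in for CPU-heavy work that would otherwise block the gevent event loop
    return sum(data) % 251

with ThreadPool.ThreadPool(4) as pool:
    jobs = [pool.spawn(checksum, bytes(range(200))) for _ in range(8)]
    results = [job.get() for job in jobs]  # blocks only the calling greenlet
    assert results == [checksum(bytes(range(200)))] * 8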
+ + assert 0.9 < time_taken < 1.2 + + with pytest.raises(Exception) as err: + event.set("another result") + + assert "Event already has value" in str(err.value) + + def testMemoryLeak(self): + import gc + thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))] + + def worker(): + time.sleep(0.1) + return "ok" + + def poolTest(): + with ThreadPool.ThreadPool(5) as pool: + for i in range(20): + pool.spawn(worker) + + for i in range(5): + poolTest() + new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before] + #print("New objs:", new_thread_objs, "run:", num_run) + + # Make sure no threadpool object left behind + assert not new_thread_objs diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py index 9479aa2f..e6b82c1a 100644 --- a/src/Test/TestTor.py +++ b/src/Test/TestTor.py @@ -4,7 +4,7 @@ import pytest import mock from File import FileServer -from Crypt import CryptRsa +from Crypt import CryptTor from Config import config @pytest.mark.usefixtures("resetSettings") @@ -34,17 +34,17 @@ class TestTor: address = tor_manager.addOnion() # Sign - sign = CryptRsa.sign("hello", tor_manager.getPrivatekey(address)) + sign = CryptTor.sign(b"hello", tor_manager.getPrivatekey(address)) assert len(sign) == 128 # Verify - publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address)) + publickey = CryptTor.privatekeyToPublickey(tor_manager.getPrivatekey(address)) assert len(publickey) == 140 - assert CryptRsa.verify("hello", publickey, sign) - assert not CryptRsa.verify("not hello", publickey, sign) + assert CryptTor.verify(b"hello", publickey, sign) + assert not CryptTor.verify(b"not hello", publickey, sign) # Pub to address - assert CryptRsa.publickeyToOnion(publickey) == address + assert CryptTor.publickeyToOnion(publickey) == address # Delete tor_manager.delOnion(address) @@ -54,7 +54,7 @@ class TestTor: file_server.tor_manager.start_onions = True address = file_server.tor_manager.getOnion(site.address) assert address - print "Connecting to", address + print("Connecting to", address) for retry in range(5): # Wait for hidden service creation time.sleep(10) try: @@ -117,7 +117,7 @@ class TestTor: file_server.tor_manager = tor_manager client = FileServer(file_server.ip, 1545) - client.sites[site_temp.address] = site_temp + client.sites = {site_temp.address: site_temp} site_temp.connection_server = client # Add file_server as peer to client @@ -135,17 +135,17 @@ class TestTor: fake_peer_3.hashfield.append(1235) fake_peer_3.hashfield.append(1236) - assert peer_file_server.findHashIds([1234, 1235]) == { - 1234: [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)], - 1235: [('1.2.3.6', 1546), ('1.2.3.5', 1545)] - } + res = peer_file_server.findHashIds([1234, 1235]) + + assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)] + assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)] # Test my address adding site.content_manager.hashfield.append(1234) res = peer_file_server.findHashIds([1234, 1235]) - assert res[1234] == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544), (file_server.ip, 1544)] - assert res[1235] == [('1.2.3.6', 1546), ('1.2.3.5', 1545)] + assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)] + assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)] def testSiteOnion(self, tor_manager): with mock.patch.object(config, "tor", "always"): diff --git a/src/Test/TestTranslate.py 
b/src/Test/TestTranslate.py index 530d1bcf..348a65a6 100644 --- a/src/Test/TestTranslate.py +++ b/src/Test/TestTranslate.py @@ -1,5 +1,3 @@ -import os - from Translate import Translate class TestTranslate: @@ -13,7 +11,6 @@ class TestTranslate: assert 'translated = _("translated")' in data_translated assert 'not_translated = "original"' in data_translated - def testTranslateStrictNamed(self): translate = Translate() data = """ @@ -25,6 +22,15 @@ class TestTranslate: assert 'translated = _("translated")' in data_translated assert 'not_translated = "original"' in data_translated + def testTranslateUtf8(self): + translate = Translate() + data = """ + greeting = "Hi again árvztűrőtökörfúrógép!" + """ + data_translated = translate.translateData(data, {"Hi again árvztűrőtökörfúrógép!": "Üdv újra árvztűrőtökörfúrógép!"}) + assert data_translated == """ + greeting = "Üdv újra árvztűrőtökörfúrógép!" + """ def testTranslateEscape(self): _ = Translate() @@ -36,7 +42,7 @@ class TestTranslate: data_translated = _(data) assert 'Szia' in data_translated assert '<' not in data_translated - assert data_translated == "Szia Hacker<script>alert('boom')</script>!" + assert data_translated == "Szia Hacker<script>alert('boom')</script>!" # Escaping dicts user = {"username": "Hacker"} @@ -44,7 +50,7 @@ class TestTranslate: data_translated = _(data) assert 'Szia' in data_translated assert '<' not in data_translated - assert data_translated == "Szia Hacker<script>alert('boom')</script>!" + assert data_translated == "Szia Hacker<script>alert('boom')</script>!" # Escaping lists users = [{"username": "Hacker"}] @@ -52,4 +58,4 @@ class TestTranslate: data_translated = _(data) assert 'Szia' in data_translated assert '<' not in data_translated - assert data_translated == "Szia Hacker<script>alert('boom')</script>!" + assert data_translated == "Szia Hacker<script>alert('boom')</script>!" 
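A pattern repeated throughout these test updates is asserting on str(err.value) rather than str(err): pytest.raises() yields an ExceptionInfo wrapper, and with current pytest the string form of that wrapper is not simply the exception message, while err.value is the raised exception itself. A minimal standalone illustration of the idiom (the exception class and message below are placeholders, not taken from the patch):

import pytest

class VerifyError(Exception):
    pass

def verify_cert(cert):
    if cert != "valid":
        raise VerifyError("Invalid cert")

def test_invalid_cert_message():
    with pytest.raises(VerifyError) as err:
        verify_cert("bogus")
    # err is the ExceptionInfo wrapper; the exception instance is err.value
    assert "Invalid cert" in str(err.value)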
diff --git a/src/Test/TestUiWebsocket.py b/src/Test/TestUiWebsocket.py new file mode 100644 index 00000000..d2d23d03 --- /dev/null +++ b/src/Test/TestUiWebsocket.py @@ -0,0 +1,11 @@ +import sys +import pytest + +@pytest.mark.usefixtures("resetSettings") +class TestUiWebsocket: + def testPermission(self, ui_websocket): + res = ui_websocket.testAction("ping") + assert res == "pong" + + res = ui_websocket.testAction("certList") + assert "You don't have permission" in res["error"] diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py index 18338bb9..f17c77bd 100644 --- a/src/Test/TestUpnpPunch.py +++ b/src/Test/TestUpnpPunch.py @@ -1,5 +1,5 @@ import socket -from urlparse import urlparse +from urllib.parse import urlparse import pytest import mock @@ -10,7 +10,7 @@ from util import UpnpPunch as upnp @pytest.fixture def mock_socket(): mock_socket = mock.MagicMock() - mock_socket.recv = mock.MagicMock(return_value='Hello') + mock_socket.recv = mock.MagicMock(return_value=b'Hello') mock_socket.bind = mock.MagicMock() mock_socket.send_to = mock.MagicMock() @@ -79,12 +79,12 @@ class TestUpnpPunch(object): upnp._retrieve_location_from_ssdp(rsp) def test_retrieve_igd_profile(self, url_obj): - with mock.patch('urllib2.urlopen') as mock_urlopen: + with mock.patch('urllib.request.urlopen') as mock_urlopen: upnp._retrieve_igd_profile(url_obj) mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5) def test_retrieve_igd_profile_timeout(self, url_obj): - with mock.patch('urllib2.urlopen') as mock_urlopen: + with mock.patch('urllib.request.urlopen') as mock_urlopen: mock_urlopen.side_effect = socket.error('Timeout error') with pytest.raises(upnp.IGDError): upnp._retrieve_igd_profile(url_obj) @@ -126,9 +126,9 @@ class TestUpnpPunch(object): def test_parse_for_errors_bad_rsp(self, httplib_response): rsp = httplib_response(status=500) - with pytest.raises(upnp.IGDError) as exc: + with pytest.raises(upnp.IGDError) as err: upnp._parse_for_errors(rsp) - assert 'Unable to parse' in str(exc) + assert 'Unable to parse' in str(err.value) def test_parse_for_errors_error(self, httplib_response): soap_error = ('' @@ -136,9 +136,9 @@ class TestUpnpPunch(object): 'Bad request' '') rsp = httplib_response(status=500, body=soap_error) - with pytest.raises(upnp.IGDError) as exc: + with pytest.raises(upnp.IGDError) as err: upnp._parse_for_errors(rsp) - assert 'SOAP request error' in str(exc) + assert 'SOAP request error' in str(err.value) def test_parse_for_errors_good_rsp(self, httplib_response): rsp = httplib_response(status=200) @@ -176,7 +176,7 @@ class TestUpnpPunch(object): soap_msg = mock_send_requests.call_args[0][0][0][0] - assert result is None + assert result is True assert mock_collect_idg.called assert '192.168.0.12' in soap_msg diff --git a/src/Test/TestUser.py b/src/Test/TestUser.py index 1fcdd1b7..e5ec5c8c 100644 --- a/src/Test/TestUser.py +++ b/src/Test/TestUser.py @@ -7,7 +7,7 @@ from Crypt import CryptBitcoin class TestUser: def testAddress(self, user): assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc" - address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811L + address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811 assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index # Re-generate privatekey based on address_index diff --git a/src/Test/TestWeb.py b/src/Test/TestWeb.py index 10e1829d..2ce66c98 100644 --- a/src/Test/TestWeb.py +++ b/src/Test/TestWeb.py @@ 
-1,4 +1,4 @@ -import urllib +import urllib.request import pytest @@ -26,7 +26,7 @@ def getContextUrl(browser): def getUrl(url): - content = urllib.urlopen(url).read() + content = urllib.request.urlopen(url).read() assert "server error" not in content.lower(), "Got a server error! " + repr(url) return content diff --git a/src/Test/TestWorkerTaskManager.py b/src/Test/TestWorkerTaskManager.py new file mode 100644 index 00000000..eb5c4a2a --- /dev/null +++ b/src/Test/TestWorkerTaskManager.py @@ -0,0 +1,128 @@ +import pytest + +from Worker import WorkerTaskManager +from . import Spy + + +class TestUiWebsocket: + def checkSort(self, tasks): # Check if it has the same order as a list sorted separately + tasks_list = list(tasks) + tasks_list.sort(key=lambda task: task["id"]) + assert tasks_list != list(tasks) + tasks_list.sort(key=lambda task: (0 - (task["priority"] - task["workers_num"] * 10), task["id"])) + assert tasks_list == list(tasks) + + def testAppendSimple(self): + tasks = WorkerTaskManager.WorkerTaskManager() + tasks.append({"id": 1, "priority": 15, "workers_num": 1, "inner_path": "file1.json"}) + tasks.append({"id": 2, "priority": 1, "workers_num": 0, "inner_path": "file2.json"}) + tasks.append({"id": 3, "priority": 8, "workers_num": 0, "inner_path": "file3.json"}) + assert [task["inner_path"] for task in tasks] == ["file3.json", "file1.json", "file2.json"] + + self.checkSort(tasks) + + def testAppendMany(self): + tasks = WorkerTaskManager.WorkerTaskManager() + for i in range(1000): + tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) + assert tasks[0]["inner_path"] == "file39.json" + assert tasks[-1]["inner_path"] == "file980.json" + + self.checkSort(tasks) + + def testRemove(self): + tasks = WorkerTaskManager.WorkerTaskManager() + for i in range(1000): + tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) + + i = 333 + task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i} + assert task in tasks + + with Spy.Spy(tasks, "indexSlow") as calls: + tasks.remove(task) + assert len(calls) == 0 + + assert task not in tasks + + # Remove non existent item + with Spy.Spy(tasks, "indexSlow") as calls: + with pytest.raises(ValueError): + tasks.remove(task) + assert len(calls) == 0 + + self.checkSort(tasks) + + def testRemoveAll(self): + tasks = WorkerTaskManager.WorkerTaskManager() + tasks_list = [] + for i in range(1000): + task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i} + tasks.append(task) + tasks_list.append(task) + + for task in tasks_list: + tasks.remove(task) + + assert len(tasks.inner_paths) == 0 + assert len(tasks) == 0 + + def testModify(self): + tasks = WorkerTaskManager.WorkerTaskManager() + for i in range(1000): + tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) + + task = tasks[333] + task["priority"] += 10 + + with pytest.raises(AssertionError): + self.checkSort(tasks) + + with Spy.Spy(tasks, "indexSlow") as calls: + tasks.updateItem(task) + assert len(calls) == 1 + + assert task in tasks + + self.checkSort(tasks) + + # Check reorder optimization + with Spy.Spy(tasks, "indexSlow") as calls: + tasks.updateItem(task, "priority", task["priority"] + 10) + assert len(calls) == 0 + + with Spy.Spy(tasks, "indexSlow") as calls: + tasks.updateItem(task, "priority", task["workers_num"] - 1) + assert len(calls) == 0 + + self.checkSort(tasks) + + def 
testModifySamePriority(self): + tasks = WorkerTaskManager.WorkerTaskManager() + for i in range(1000): + tasks.append({"id": i, "priority": 10, "workers_num": 5, "inner_path": "file%s.json" % i}) + + task = tasks[333] + + # Check reorder optimization + with Spy.Spy(tasks, "indexSlow") as calls: + tasks.updateItem(task, "priority", task["workers_num"] - 1) + assert len(calls) == 0 + + def testIn(self): + tasks = WorkerTaskManager.WorkerTaskManager() + + i = 1 + task = {"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i} + + assert task not in tasks + + def testFindTask(self): + tasks = WorkerTaskManager.WorkerTaskManager() + for i in range(1000): + tasks.append({"id": i, "priority": i % 20, "workers_num": i % 3, "inner_path": "file%s.json" % i}) + + assert tasks.findTask("file999.json") + assert not tasks.findTask("file-unknown.json") + tasks.remove(tasks.findTask("file999.json")) + assert not tasks.findTask("file999.json") diff --git a/src/Test/conftest.py b/src/Test/conftest.py index 01f1e45d..c8739086 100644 --- a/src/Test/conftest.py +++ b/src/Test/conftest.py @@ -1,23 +1,35 @@ import os import sys -import urllib +import urllib.request import time import logging import json import shutil import gc import datetime +import atexit +import threading +import socket import pytest import mock import gevent +if "libev" not in str(gevent.config.loop): + # Workaround for random crash when libuv used with threads + gevent.config.loop = "libev-cext" + +import gevent.event from gevent import monkey monkey.patch_all(thread=False, subprocess=False) +atexit_register = atexit.register +atexit.register = lambda func: "" # Don't register shutdown functions to avoid IO error on exit + def pytest_addoption(parser): parser.addoption("--slow", action='store_true', default=False, help="Also run slow tests") + def pytest_collection_modifyitems(config, items): if config.getoption("--slow"): # --runslow given in cli: do not skip slow tests @@ -34,70 +46,106 @@ else: CHROMEDRIVER_PATH = "chromedriver" SITE_URL = "http://127.0.0.1:43110" +TEST_DATA_PATH = 'src/Test/testdata' sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/../lib")) # External modules directory sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + "/..")) # Imports relative to src dir from Config import config config.argv = ["none"] # Dont pass any argv to config parser -config.parse(silent=True) # Plugins need to access the configuration +config.parse(silent=True, parse_config=False) # Plugins need to access the configuration config.action = "test" -logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) - -# Set custom formatter with realative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log) -class TimeFilter(logging.Filter): - - def filter(self, record): - try: - last = self.last - except AttributeError: - last = record.relativeCreated - - delta = datetime.datetime.fromtimestamp(record.relativeCreated/1000.0) - datetime.datetime.fromtimestamp(last/1000.0) - - record.relative = '{0:.3f}'.format(delta.seconds + delta.microseconds/1000000.0) - - self.last = record.relativeCreated - return True - -log = logging.getLogger() -fmt = logging.Formatter(fmt='+%(relative)ss %(levelname)-8s %(name)s %(message)s') -[hndl.addFilter(TimeFilter()) for hndl in log.handlers] -[hndl.setFormatter(fmt) for hndl in log.handlers] - # Load plugins from Plugin import PluginManager -config.data_dir = "src/Test/testdata" # Use test data for unittests 
+config.data_dir = TEST_DATA_PATH # Use test data for unittests +config.debug = True os.chdir(os.path.abspath(os.path.dirname(__file__) + "/../..")) # Set working dir -# Cleanup content.db caches -if os.path.isfile("%s/content.db" % config.data_dir): - os.unlink("%s/content.db" % config.data_dir) -if os.path.isfile("%s-temp/content.db" % config.data_dir): - os.unlink("%s-temp/content.db" % config.data_dir) -PluginManager.plugin_manager.loadPlugins() +all_loaded = PluginManager.plugin_manager.loadPlugins() +assert all_loaded, "Not all plugin loaded successfully" + config.loadPlugins() -config.parse() # Parse again to add plugin configuration options +config.parse(parse_config=False) # Parse again to add plugin configuration options +config.action = "test" +config.debug = True config.debug_socket = True # Use test data for unittests config.verbose = True # Use test data for unittests config.tor = "disable" # Don't start Tor client config.trackers = [] -config.data_dir = "src/Test/testdata" # Use test data for unittests +config.data_dir = TEST_DATA_PATH # Use test data for unittests +if "ZERONET_LOG_DIR" in os.environ: + config.log_dir = os.environ["ZERONET_LOG_DIR"] +config.initLogging(console_logging=False) -from Site import Site +# Set custom formatter with realative time format (via: https://stackoverflow.com/questions/31521859/python-logging-module-time-since-last-log) +time_start = time.time() +class TimeFilter(logging.Filter): + def __init__(self, *args, **kwargs): + self.time_last = time.time() + self.main_thread_id = threading.current_thread().ident + super().__init__(*args, **kwargs) + + def filter(self, record): + if threading.current_thread().ident != self.main_thread_id: + record.thread_marker = "T" + record.thread_title = "(Thread#%s)" % self.main_thread_id + else: + record.thread_marker = " " + record.thread_title = "" + + since_last = time.time() - self.time_last + if since_last > 0.1: + line_marker = "!" 
+ elif since_last > 0.02: + line_marker = "*" + elif since_last > 0.01: + line_marker = "-" + else: + line_marker = " " + + since_start = time.time() - time_start + record.since_start = "%s%.3fs" % (line_marker, since_start) + + self.time_last = time.time() + return True + +log = logging.getLogger() +fmt = logging.Formatter(fmt='%(since_start)s %(thread_marker)s %(levelname)-8s %(name)s %(message)s %(thread_title)s') +[hndl.addFilter(TimeFilter()) for hndl in log.handlers] +[hndl.setFormatter(fmt) for hndl in log.handlers] + +from Site.Site import Site from Site import SiteManager from User import UserManager from File import FileServer from Connection import ConnectionServer from Crypt import CryptConnection +from Crypt import CryptBitcoin from Ui import UiWebsocket from Tor import TorManager from Content import ContentDb from util import RateLimit from Db import Db +from Debug import Debug + +gevent.get_hub().NOT_ERROR += (Debug.Notify,) + +def cleanup(): + Db.dbCloseAll() + for dir_path in [config.data_dir, config.data_dir + "-temp"]: + if os.path.isdir(dir_path): + for file_name in os.listdir(dir_path): + ext = file_name.rsplit(".", 1)[-1] + if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]: + continue + file_path = dir_path + "/" + file_name + if os.path.isfile(file_path): + os.unlink(file_path) + +atexit_register(cleanup) @pytest.fixture(scope="session") def resetSettings(request): @@ -113,6 +161,7 @@ def resetSettings(request): } """) + @pytest.fixture(scope="session") def resetTempSettings(request): data_dir_temp = config.data_dir + "-temp" @@ -156,10 +205,9 @@ def site(request): site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net def cleanup(): - site.storage.deleteFiles() - site.content_manager.contents.db.deleteSite(site) - del SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] - site.content_manager.contents.db.close() + site.delete() + site.content_manager.contents.db.close("Test cleanup") + site.content_manager.contents.db.timer_check_optional.kill() SiteManager.site_manager.sites.clear() db_path = "%s/content.db" % config.data_dir os.unlink(db_path) @@ -167,10 +215,12 @@ def site(request): gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) request.addfinalizer(cleanup) + site.greenlet_manager.stopGreenlets() site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files if not SiteManager.site_manager.sites: SiteManager.site_manager.sites = {} SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site + site.settings["serving"] = True return site @@ -179,23 +229,27 @@ def site_temp(request): threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)] with mock.patch("Config.config.data_dir", config.data_dir + "-temp"): site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") + site_temp.settings["serving"] = True site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net def cleanup(): - site_temp.storage.deleteFiles() - site_temp.content_manager.contents.db.deleteSite(site_temp) - site_temp.content_manager.contents.db.close() + site_temp.delete() + site_temp.content_manager.contents.db.close("Test cleanup") + site_temp.content_manager.contents.db.timer_check_optional.kill() db_path = "%s-temp/content.db" % config.data_dir os.unlink(db_path) del ContentDb.content_dbs[db_path] gevent.killall([obj for obj in gc.get_objects() if 
isinstance(obj, gevent.Greenlet) and obj not in threads_before]) request.addfinalizer(cleanup) + site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short) return site_temp @pytest.fixture(scope="session") def user(): user = UserManager.user_manager.get() + if not user: + user = UserManager.user_manager.create() user.sites = {} # Reset user data return user @@ -204,7 +258,7 @@ def user(): def browser(request): try: from selenium import webdriver - print "Starting chromedriver..." + print("Starting chromedriver...") options = webdriver.chrome.options.Options() options.add_argument("--headless") options.add_argument("--window-size=1920x1080") @@ -214,7 +268,7 @@ def browser(request): def quit(): browser.quit() request.addfinalizer(quit) - except Exception, err: + except Exception as err: raise pytest.skip("Test requires selenium + chromedriver: %s" % err) return browser @@ -222,8 +276,8 @@ def browser(request): @pytest.fixture(scope="session") def site_url(): try: - urllib.urlopen(SITE_URL).read() - except Exception, err: + urllib.request.urlopen(SITE_URL).read() + except Exception as err: raise pytest.skip("Test requires zeronet client running: %s" % err) return SITE_URL @@ -238,6 +292,7 @@ def file_server(request): @pytest.fixture def file_server4(request): + time.sleep(0.1) file_server = FileServer("127.0.0.1", 1544) file_server.ip_external = "1.2.3.4" # Fake external ip @@ -253,8 +308,8 @@ def file_server4(request): conn = file_server.getConnection("127.0.0.1", 1544) conn.close() break - except Exception, err: - print err + except Exception as err: + print("FileServer6 startup error", Debug.formatException(err)) assert file_server.running file_server.ip_incoming = {} # Reset flood protection @@ -263,8 +318,20 @@ def file_server4(request): request.addfinalizer(stop) return file_server + @pytest.fixture def file_server6(request): + try: + sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) + sock.connect(("::1", 80, 1, 1)) + has_ipv6 = True + except OSError: + has_ipv6 = False + if not has_ipv6: + pytest.skip("Ipv6 not supported") + + + time.sleep(0.1) file_server6 = FileServer("::1", 1544) file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c' # Fake external ip @@ -280,8 +347,8 @@ def file_server6(request): conn = file_server6.getConnection("::1", 1544) conn.close() break - except Exception, err: - print err + except Exception as err: + print("FileServer6 startup error", Debug.formatException(err)) assert file_server6.running file_server6.ip_incoming = {} # Reset flood protection @@ -290,22 +357,30 @@ def file_server6(request): request.addfinalizer(stop) return file_server6 + @pytest.fixture() -def ui_websocket(site, file_server, user): +def ui_websocket(site, user): class WsMock: def __init__(self): - self.result = None + self.result = gevent.event.AsyncResult() def send(self, data): - self.result = json.loads(data)["result"] + logging.debug("WsMock: Set result (data: %s) called by %s" % (data, Debug.formatStack())) + self.result.set(json.loads(data)["result"]) + + def getResult(self): + logging.debug("WsMock: Get result") + back = self.result.get() + logging.debug("WsMock: Got result (data: %s)" % back) + self.result = gevent.event.AsyncResult() + return back ws_mock = WsMock() - ui_websocket = UiWebsocket(ws_mock, site, file_server, user, None) + ui_websocket = UiWebsocket(ws_mock, site, None, user, None) def testAction(action, *args, **kwargs): - func = getattr(ui_websocket, "action%s" % action) - func(0, *args, **kwargs) - return ui_websocket.ws.result 
+ ui_websocket.handleRequest({"id": 0, "cmd": action, "params": list(args) if args else kwargs}) + return ui_websocket.ws.getResult() ui_websocket.testAction = testAction return ui_websocket @@ -314,14 +389,15 @@ def ui_websocket(site, file_server, user): @pytest.fixture(scope="session") def tor_manager(): try: - tor_manager = TorManager() + tor_manager = TorManager(fileserver_port=1544) tor_manager.start() - assert tor_manager.conn + assert tor_manager.conn is not None tor_manager.startOnions() - except Exception, err: + except Exception as err: raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err)) return tor_manager + @pytest.fixture() def db(request): db_path = "%s/zeronet.db" % config.data_dir @@ -360,12 +436,62 @@ def db(request): if os.path.isfile(db_path): os.unlink(db_path) - db = Db(schema, db_path) + db = Db.Db(schema, db_path) db.checkTables() def stop(): - db.close() + db.close("Test db cleanup") os.unlink(db_path) request.addfinalizer(stop) return db + + +@pytest.fixture(params=["sslcrypto", "sslcrypto_fallback", "libsecp256k1"]) +def crypt_bitcoin_lib(request, monkeypatch): + monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param) + CryptBitcoin.loadLib(request.param) + return CryptBitcoin + +@pytest.fixture(scope='function', autouse=True) +def logCaseStart(request): + global time_start + time_start = time.time() + logging.debug("---- Start test case: %s ----" % request._pyfuncitem) + yield None # Wait until all test done + + +# Workaround for pytest bug when logging in atexit/post-fixture handlers (I/O operation on closed file) +def workaroundPytestLogError(): + import _pytest.capture + write_original = _pytest.capture.EncodedFile.write + + def write_patched(obj, *args, **kwargs): + try: + write_original(obj, *args, **kwargs) + except ValueError as err: + if str(err) == "I/O operation on closed file": + pass + else: + raise err + + def flush_patched(obj, *args, **kwargs): + try: + obj.buffer.flush(*args, **kwargs) + except ValueError as err: + if str(err).startswith("I/O operation on closed file"): + pass + else: + raise err + + _pytest.capture.EncodedFile.write = write_patched + _pytest.capture.EncodedFile.flush = flush_patched + + +workaroundPytestLogError() + +@pytest.fixture(scope='session', autouse=True) +def disableLog(): + yield None # Wait until all test done + logging.getLogger('').setLevel(logging.getLevelName(logging.CRITICAL)) + diff --git a/src/Test/pytest.ini b/src/Test/pytest.ini index d09210d1..0ffb385f 100644 --- a/src/Test/pytest.ini +++ b/src/Test/pytest.ini @@ -1,5 +1,6 @@ [pytest] python_files = Test*.py -addopts = -rsxX -v --durations=6 +addopts = -rsxX -v --durations=6 --capture=fd markers = - webtest: mark a test as a webtest. \ No newline at end of file + slow: mark a tests as slow. + webtest: mark a test as a webtest. 
diff --git a/src/Tor/TorManager.py b/src/Tor/TorManager.py index 70d6c015..865d8fbf 100644 --- a/src/Tor/TorManager.py +++ b/src/Tor/TorManager.py @@ -12,14 +12,13 @@ import atexit import gevent from Config import config -from Crypt import CryptRsa + +from lib import Ed25519 +from Crypt import CryptTor + from Site import SiteManager -from lib.PySocks import socks -try: - from gevent.coros import RLock -except: - from gevent.lock import RLock -from util import helper +import socks +from gevent.lock import RLock from Debug import Debug from Plugin import PluginManager @@ -37,6 +36,8 @@ class TorManager(object): self.conn = None self.lock = RLock() self.starting = True + self.connecting = True + self.status = None self.event_started = gevent.event.AsyncResult() if config.tor == "disable": @@ -63,35 +64,33 @@ class TorManager(object): self.starting = True try: if not self.connect(): - raise Exception("No connection") + raise Exception(self.status) self.log.debug("Tor proxy port %s check ok" % config.tor_proxy) - except Exception, err: - if sys.platform.startswith("win"): - self.log.info(u"Starting self-bundled Tor, due to Tor proxy port %s check error: %s" % (config.tor_proxy, err)) + except Exception as err: + if sys.platform.startswith("win") and os.path.isfile(self.tor_exe): + self.log.info("Starting self-bundled Tor, due to Tor proxy port %s check error: %s" % (config.tor_proxy, err)) + # Change to self-bundled Tor ports + self.port = 49051 + self.proxy_port = 49050 + if config.tor == "always": + socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) + self.enabled = True + if not self.connect(): + self.startTor() else: - self.log.info(u"Disabling Tor, because error while accessing Tor proxy at port %s: %s" % (config.tor_proxy, err)) - self.enabled = False - # Change to self-bundled Tor ports - from lib.PySocks import socks - self.port = 49051 - self.proxy_port = 49050 - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) - if os.path.isfile(self.tor_exe): # Already, downloaded: sync mode - self.startTor() - else: # Not downloaded yet: Async mode - gevent.spawn(self.startTor) + self.log.info("Disabling Tor, because error while accessing Tor proxy at port %s: %s" % (config.tor_proxy, err)) + self.enabled = False def setStatus(self, status): self.status = status - if "ui_server" in dir(sys.modules.get("main", {})): - sys.modules["main"].ui_server.updateWebsocket() + if "main" in sys.modules: # import main has side-effects, breaks tests + import main + if "ui_server" in dir(main): + main.ui_server.updateWebsocket() def startTor(self): if sys.platform.startswith("win"): try: - if not os.path.isfile(self.tor_exe): - self.downloadTor() - self.log.info("Starting Tor client %s..." 
% self.tor_exe) tor_dir = os.path.dirname(self.tor_exe) startupinfo = subprocess.STARTUPINFO() @@ -101,7 +100,7 @@ class TorManager(object): cmd += " --UseBridges 1" self.tor_process = subprocess.Popen(cmd, cwd=tor_dir, close_fds=True, startupinfo=startupinfo) - for wait in range(1, 10): # Wait for startup + for wait in range(1, 3): # Wait for startup time.sleep(wait * 0.5) self.enabled = True if self.connect(): @@ -110,8 +109,8 @@ class TorManager(object): break # Terminate on exit atexit.register(self.stopTor) - except Exception, err: - self.log.error(u"Error starting Tor client: %s" % Debug.formatException(str(err).decode("utf8", "ignore"))) + except Exception as err: + self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err))) self.enabled = False self.starting = False self.event_started.set(False) @@ -125,51 +124,9 @@ class TorManager(object): try: if self.isSubprocessRunning(): self.request("SIGNAL SHUTDOWN") - except Exception, err: + except Exception as err: self.log.error("Error stopping Tor: %s" % err) - def downloadTor(self): - self.log.info("Downloading Tor...") - # Check Tor webpage for link - download_page = helper.httpRequest("https://www.torproject.org/download/download.html").read() - download_url = re.search('href="(.*?tor.*?win32.*?zip)"', download_page).group(1) - if not download_url.startswith("http"): - download_url = "https://www.torproject.org/download/" + download_url - - # Download Tor client - self.log.info("Downloading %s" % download_url) - data = helper.httpRequest(download_url, as_file=True) - data_size = data.tell() - - # Handle redirect - if data_size < 1024 and "The document has moved" in data.getvalue(): - download_url = re.search('href="(.*?tor.*?win32.*?zip)"', data.getvalue()).group(1) - data = helper.httpRequest(download_url, as_file=True) - data_size = data.tell() - - if data_size > 1024: - import zipfile - zip = zipfile.ZipFile(data) - self.log.info("Unpacking Tor") - for inner_path in zip.namelist(): - if ".." 
in inner_path: - continue - dest_path = inner_path - dest_path = re.sub("^Data/Tor/", "tools/tor/data/", dest_path) - dest_path = re.sub("^Data/", "tools/tor/data/", dest_path) - dest_path = re.sub("^Tor/", "tools/tor/", dest_path) - dest_dir = os.path.dirname(dest_path) - if dest_dir and not os.path.isdir(dest_dir): - os.makedirs(dest_dir) - - if dest_dir != dest_path.strip("/"): - data = zip.read(inner_path) - if not os.path.isfile(dest_path): - open(dest_path, 'wb').write(data) - else: - self.log.error("Bad response from server: %s" % data.getvalue()) - return False - def connect(self): if not self.enabled: return False @@ -184,7 +141,8 @@ class TorManager(object): else: conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.log.info("Connecting to Tor Controller %s:%s" % (self.ip, self.port)) + self.log.debug("Connecting to Tor Controller %s:%s" % (self.ip, self.port)) + self.connecting = True try: with self.lock: conn.connect((self.ip, self.port)) @@ -196,33 +154,39 @@ class TorManager(object): if config.tor_password: res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn) elif cookie_match: - cookie_file = cookie_match.group(1).decode("string-escape") + cookie_file = cookie_match.group(1).encode("ascii").decode("unicode_escape") + if not os.path.isfile(cookie_file) and self.tor_process: + # Workaround for tor client cookie auth file utf8 encoding bug (https://github.com/torproject/stem/issues/57) + cookie_file = os.path.dirname(self.tor_exe) + "\\data\\control_auth_cookie" auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read()) - res_auth = self.send("AUTHENTICATE %s" % auth_hex, conn) + res_auth = self.send("AUTHENTICATE %s" % auth_hex.decode("utf8"), conn) else: res_auth = self.send("AUTHENTICATE", conn) - assert "250 OK" in res_auth, "Authenticate error %s" % res_auth + if "250 OK" not in res_auth: + raise Exception("Authenticate error %s" % res_auth) # Version 0.2.7.5 required because ADD_ONION support res_version = self.send("GETINFO version", conn) - version = re.search('version=([0-9\.]+)', res_version).group(1) - assert float(version.replace(".", "0", 2)) >= 207.5, "Tor version >=0.2.7.5 required, found: %s" % version + version = re.search(r'version=([0-9\.]+)', res_version).group(1) + if float(version.replace(".", "0", 2)) < 207.5: + raise Exception("Tor version >=0.2.7.5 required, found: %s" % version) - self.setStatus(u"Connected (%s)" % res_auth) + self.setStatus("Connected (%s)" % res_auth) self.event_started.set(True) self.starting = False self.connecting = False self.conn = conn - except Exception, err: + except Exception as err: self.conn = None - self.setStatus(u"Error (%s)" % str(err).decode("utf8", "ignore")) - self.log.error(u"Tor controller connect error: %s" % Debug.formatException(str(err).decode("utf8", "ignore"))) + self.setStatus("Error (%s)" % str(err)) + self.log.warning("Tor controller connect error: %s" % Debug.formatException(str(err))) self.enabled = False return self.conn def disconnect(self): - self.conn.close() + if self.conn: + self.conn.close() self.conn = None def startOnions(self): @@ -235,31 +199,31 @@ class TorManager(object): def resetCircuits(self): res = self.request("SIGNAL NEWNYM") if "250 OK" not in res: - self.setStatus(u"Reset circuits error (%s)" % res) + self.setStatus("Reset circuits error (%s)" % res) self.log.error("Tor reset circuits error: %s" % res) def addOnion(self): if len(self.privatekeys) >= config.tor_hs_limit: - return random.choice([key for key in self.privatekeys.keys() if key != 
self.site_onions.get("global")]) + return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")]) result = self.makeOnionAndKey() if result: onion_address, onion_privatekey = result self.privatekeys[onion_address] = onion_privatekey - self.setStatus(u"OK (%s onions running)" % len(self.privatekeys)) + self.setStatus("OK (%s onions running)" % len(self.privatekeys)) SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) return onion_address else: return False def makeOnionAndKey(self): - res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port) - match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL) + res = self.request("ADD_ONION NEW:ED25519-V3 port=%s" % self.fileserver_port) + match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=ED25519-V3:(.*?)[\r\n]", res, re.DOTALL) if match: onion_address, onion_privatekey = match.groups() return (onion_address, onion_privatekey) else: - self.setStatus(u"AddOnion error (%s)" % res) + self.setStatus("AddOnion error (%s)" % res) self.log.error("Tor addOnion error: %s" % res) return False @@ -270,7 +234,7 @@ class TorManager(object): self.setStatus("OK (%s onion running)" % len(self.privatekeys)) return True else: - self.setStatus(u"DelOnion error (%s)" % res) + self.setStatus("DelOnion error (%s)" % res) self.log.error("Tor delOnion error: %s" % res) self.disconnect() return False @@ -291,15 +255,16 @@ class TorManager(object): back = "" for retry in range(2): try: - conn.sendall("%s\r\n" % cmd) + conn.sendall(b"%s\r\n" % cmd.encode("utf8")) while not back.endswith("250 OK\r\n"): - back += conn.recv(1024 * 64).decode("utf8", "ignore") + back += conn.recv(1024 * 64).decode("utf8") break - except Exception, err: + except Exception as err: self.log.error("Tor send error: %s, reconnecting..." 
% err) - self.disconnect() - time.sleep(1) - self.connect() + if not self.connecting: + self.disconnect() + time.sleep(1) + self.connect() back = None if back: self.log.debug("< %s" % back.strip()) @@ -309,7 +274,7 @@ class TorManager(object): return self.privatekeys[address] def getPublickey(self, address): - return CryptRsa.privatekeyToPublickey(self.privatekeys[address]) + return CryptTor.privatekeyToPublickey(self.privatekeys[address]) def getOnion(self, site_address): if not self.enabled: diff --git a/src/Tor/__init__.py b/src/Tor/__init__.py index 250eac2d..d0fcffaf 100644 --- a/src/Tor/__init__.py +++ b/src/Tor/__init__.py @@ -1 +1 @@ -from TorManager import TorManager \ No newline at end of file +from .TorManager import TorManager \ No newline at end of file diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py index 90b070b9..e73f9be1 100644 --- a/src/Translate/Translate.py +++ b/src/Translate/Translate.py @@ -3,7 +3,7 @@ import json import logging import inspect import re -import cgi +import html import string from Config import config @@ -15,8 +15,8 @@ class EscapeProxy(dict): # Automatically escape the accessed string values def __getitem__(self, key): val = dict.__getitem__(self, key) - if type(val) in (str, unicode): - return cgi.escape(val, quote=True) + if type(val) in (str, str): + return html.escape(val) elif type(val) is dict: return EscapeProxy(val) elif type(val) is list: @@ -28,7 +28,7 @@ class EscapeProxy(dict): class Translate(dict): def __init__(self, lang_dir=None, lang=None): if not lang_dir: - lang_dir = "src/Translate/languages/" + lang_dir = os.path.dirname(__file__) + "/languages/" if not lang: lang = config.language self.lang = lang @@ -39,7 +39,7 @@ class Translate(dict): if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader - DebugReloader(self.load) + DebugReloader.watcher.addCallback(self.load) translates.append(self) @@ -58,7 +58,7 @@ class Translate(dict): self.clear() elif os.path.isfile(self.lang_file): try: - data = json.load(open(self.lang_file)) + data = json.load(open(self.lang_file, encoding="utf8")) logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data))) except Exception as err: logging.error("Error loading translate file %s: %s" % (self.lang_file, err)) @@ -94,18 +94,16 @@ class Translate(dict): def pluralize(self, value, single, multi): if value > 1: - return self[single].format(value) - else: return self[multi].format(value) + else: + return self[single].format(value) def translateData(self, data, translate_table=None, mode="js"): if not translate_table: translate_table = self - data = data.decode("utf8") - patterns = [] - for key, val in translate_table.items(): + for key, val in list(translate_table.items()): if key.startswith("_("): # Problematic string: only match if called between _(" ") function key = key.replace("_(", "").replace(")", "").replace(", ", '", "') translate_table[key] = "|" + val @@ -128,6 +126,10 @@ class Translate(dict): else: pattern = '"(' + "|".join(patterns) + ')"' data = re.sub(pattern, replacer, data) - return data.encode("utf8") + + if mode == "html": + data = data.replace("lang={lang}", "lang=%s" % self.lang) # lang get parameter to .js file to avoid cache + + return data translate = Translate() diff --git a/src/Translate/__init__.py b/src/Translate/__init__.py index 40f34063..ba0ab6d4 100644 --- a/src/Translate/__init__.py +++ b/src/Translate/__init__.py @@ -1 +1 @@ -from Translate import * \ No newline at end of file +from .Translate 
import * \ No newline at end of file diff --git a/src/Translate/languages/fa.json b/src/Translate/languages/fa.json new file mode 100644 index 00000000..e644247a --- /dev/null +++ b/src/Translate/languages/fa.json @@ -0,0 +1,50 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "تبریک، درگاه {0} شما باز شده است.
    شما یک عضو تمام شبکه ZeroNet هستید!", + "Tor mode active, every connection using Onion route.": "حالت Tor فعال است، هر ارتباط از مسیریابی پیاز (Onion) استفاده می‌کند.", + "Successfully started Tor onion hidden services.": "خدمات پنهان پیاز (Onion) Tor با موفقیت راه‌اندازی شد.", + "Unable to start hidden services, please check your config.": "قادر به راه‌اندازی خدمات پنهان نیستیم، لطفا تنظیمات خود را بررسی نمایید.", + "For faster connections open {0} port on your router.": "برای ارتباطات سریعتر درگاه {0} را بر روی مسیریاب (روتر) خود باز نمایید.", + "Your connection is restricted. Please, open {0} port on your router": "ارتباط شما محدود‌شده است. لطفا درگاه {0} را در مسیریاب (روتر) خود باز نمایید", + "or configure Tor to become a full member of the ZeroNet network.": "یا پیکربندی Tor را انجام دهید تا به یک عضو تمام شبکه ZeroNet تبدیل شوید.", + + "Select account you want to use in this site:": "حسابی را که می‌خواهید در این سایت استفاده کنید، انتخاب کنید:", + "currently selected": "در حال حاضر انتخاب‌شده", + "Unique to site": "مختص به سایت", + + "Content signing failed": "امضای محتوا با شکست مواجه شد", + "Content publish queued for {0:.0f} seconds.": "محتوا در صف انتشار با {0:.0f} ثانیه تاخیر قرار گرفت.", + "Content published to {0} peers.": "محتوا برای {0} تعداد همتا انتشار یافت.", + "No peers found, but your content is ready to access.": "همتایی یافت نشد، اما محتوای شما آماده دسترسی است.", + "Your network connection is restricted. Please, open {0} port": "ارتباط شبکه شما محدود‌شده است. لطفا درگاه {0} را", + "on your router to make your site accessible for everyone.": "در مسیریاب (روتر) خود باز کنید تا سایت خود را برای همه در دسترس قرار دهید.", + "Content publish failed.": "انتشار محتوا موفق نبود.", + "This file still in sync, if you write it now, then the previous content may be lost.": "این فایل همچنان همگام است، اگز شما آن را بنویسید، ممکن است محتوای قبلی از‌بین رود.", + "Write content anyway": "در هر صورت محتوا را بنویس", + "New certificate added:": "گواهی جدیدی افزوده شد:", + "You current certificate:": "گواهی فعلی شما:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "تغییرش بده به {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "گواهینامه به: {auth_type}/{auth_user_name}@{domain} تغییر پیدا کرد.", + "Site cloned": "سایت همسان‌سازی شد", + + "You have successfully changed the web interface's language!": "شما با موفقیت زبان رابط وب را تغییر دادید!", + "Due to the browser's caching, the full transformation could take some minute.": "به دلیل ذخیره‌سازی در مرور‌گر، امکان دارد تغییر شکل کامل چند دقیقه طول بکشد.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "اتصال با UiServer Websocket قطع شد. 
اتصال دوباره...", + "Connection with UiServer Websocket recovered.": "ارتباط با UiServer Websocket دوباره بر‌قرار شد.", + "UiServer Websocket error, please reload the page.": "خطای UiServer Websocket, لطفا صفحه را دوباره بارگیری کنید.", + "   Connecting...": "   برقراری ارتباط...", + "Site size: ": "حجم سایت: ", + "MB is larger than default allowed ": "MB بیشتر از پیش‌فرض مجاز است ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "سایت را باز کرده و محدوده حجم را به \" + site_info.next_size_limit + \"MB تنظیم کن", + " files needs to be downloaded": " فایل‌هایی که نیاز است، دانلود شوند", + " downloaded": " دانلود شد", + " download failed": " دانلود موفق نبود", + "Peers found: ": "چند همتا یافت شد: ", + "No peers found": "همتایی یافت نشد", + "Running out of size limit (": "عبور کرده از محدوده حجم (", + "Set limit to \" + site_info.next_size_limit + \"MB": "محدوده را به \" + site_info.next_size_limit + \"MB تنظیم کن", + "Site size limit changed to {0}MB": "محدوده حجم سایت به {0}MB تغییر کرد", + " New version of this page has just released.
    Reload to see the modified content.": " نسخه جدیدی از این صفحه منتشر شده است.
    برای مشاهده محتوای تغییر‌یافته دوباره بارگیری نمایید.", + "This site requests permission:": "این سایت درخواست مجوز می‌کند:", + "_(Accept)": "_(پذیرفتن)" +} diff --git a/src/Translate/languages/it.json b/src/Translate/languages/it.json index f3ee5d87..47992328 100644 --- a/src/Translate/languages/it.json +++ b/src/Translate/languages/it.json @@ -39,7 +39,7 @@ " files needs to be downloaded": " i file devono essere scaricati", " downloaded": " scaricati", " download failed": " scaricamento fallito", - "Peers found: ": "Peers trovati: ", + "Peers found: ": "Peer trovati: ", "No peers found": "Nessun peer trovato", "Running out of size limit (": "Superato il limite di spazio (", "Set limit to \" + site_info.next_size_limit + \"MB": "Imposta il limite a \" + site_info.next_size_limit + \"MB", diff --git a/src/Translate/languages/jp.json b/src/Translate/languages/jp.json index 9978acc7..ff10aee4 100644 --- a/src/Translate/languages/jp.json +++ b/src/Translate/languages/jp.json @@ -1,82 +1,66 @@ { - "Peers": "ピア", - "Connected": "接続済み", - "Connectable": "利用可能", - "Connectable peers": "ピアに接続可能", + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "おめでとうございます。ポート {0} が開きました。これでZeroNetネットワークのメンバーです。", + "Tor mode active, every connection using Onion route.": "Torモードがアクティブです、全ての接続はOnionルートを使用します。", + "Successfully started Tor onion hidden services.": "Tor onionサービスを正常に開始しました。", + "Unable to start hidden services, please check your config.": "非表示のサービスを開始できません。設定を確認してください。", + "For faster connections open {0} port on your router.": "接続を高速化するにはルーターのポート {0} を開けてください。", + "Your connection is restricted. Please, open {0} port on your router": "接続が制限されています。ルーターのポート {0} を開けてください。", + "or configure Tor to become a full member of the ZeroNet network.": "または、TorをZeroNetネットワークのメンバーになるように設定してください。", - "Data transfer": "データ転送", - "Received": "受信", - "Received bytes": "受信バイト数", - "Sent": "送信", - "Sent bytes": "送信バイト数", + "Select account you want to use in this site:": "このサイトで使用するアカウントを選択:", + "No certificate": "証明書がありません", + "currently selected": "現在選択中", + "Unique to site": "サイト固有", - "Files": "ファイル", - "Total": "合計", - "Image": "画像", - "Other": "その他", - "User data": "ユーザーデータ", + "Content signing failed": "コンテンツの署名に失敗", + "Content publish queued for {0:.0f} seconds.": "コンテンツの公開は{0:.0f}秒のキューに入れられました。", + "Content published to {0}/{1} peers.": "サイトの更新を通知済 {0}/{1} ピア", + "Content published to {0} peers.": "{0}ピアに公開されたコンテンツ。", + "No peers found, but your content is ready to access.": "ピアは見つかりませんでしたが、コンテンツにアクセスする準備ができました。", + "Your network connection is restricted. Please, open {0} port": "ネットワーク接続が制限されています。ポート {0} を開いて、", + "on your router to make your site accessible for everyone.": "誰でもサイトにアクセスできるようにしてください。", + "Content publish failed.": "コンテンツの公開に失敗しました。", + "This file still in sync, if you write it now, then the previous content may be lost.": "このファイルはまだ同期しています。今すぐ書き込むと、前のコンテンツが失われる可能性があります。", + "Write content anyway": "とにかくコンテンツを書く", + "New certificate added:": "新しい証明書が追加されました:", + "You current certificate:": "現在の証明書:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} に変更", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "変更後の証明書: {auth_type}/{auth_user_name}@{domain}", + "Site cloned": "複製されたサイト", - "Size limit": "サイズ制限", - "limit used": "使用上限", - "free space": "フリースペース", - "Set": "セット", + "You have successfully changed the web interface's language!": "Webインターフェースの言語が正常に変更されました!", + "Due to the browser's caching, the full transformation could take some minute.": "ブラウザのキャッシュにより、完全な変換には数分かかる場合があります。", - "Optional files": "オプション ファイル", - "Downloaded": "ダウンロード済み", - "Download and help distribute all files": "ダウンロードしてすべてのファイルの配布を支援する", - "Total size": "合計サイズ", - "Downloaded files": "ダウンロードされたファイル", + "Connection with UiServer Websocket was lost. 
Reconnecting...": "UiServer Websocketとの接続が失われました。再接続しています...", + "Connection with UiServer Websocket recovered.": "UiServer Websocketとの接続が回復しました。", + "UiServer Websocket error, please reload the page.": "UiServer Websocketエラー、ページをリロードしてください。", + "   Connecting...": "   接続しています...", + "Site size: ": "サイトサイズ: ", + "MB is larger than default allowed ": "MBはデフォルトの許容値よりも大きいです。 ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "サイトを開き、サイズ制限を \" + site_info.next_size_limit + \"MB に設定", + " files needs to be downloaded": " ファイルをダウンロードする必要があります", + " downloaded": " ダウンロード", + " download failed": " ダウンロード失敗", + "Peers found: ": "ピアが見つかりました: ", + "No peers found": "ピアが見つかりません", + "Running out of size limit (": "サイズ制限を使い果たしました (", + "Set limit to \" + site_info.next_size_limit + \"MB": "制限を \" + site_info.next_size_limit + \"MB に設定", + "Cloning site...": "サイトを複製中…", + "Site size limit changed to {0}MB": "サイトのサイズ制限が {0}MB に変更されました", + " New version of this page has just released.
    Reload to see the modified content.": " このページの新しいバージョンが公開されました。
    変更されたコンテンツを見るには再読み込みしてください。", + "This site requests permission:": "このサイトは権限を要求しています:", + "_(Accept)": "_(許可)", + + "Save": "保存", + "Trackers announcing": "トラッカーをお知らせ", + "Error": "エラー", + "Done": "完了", + "Tracker connection error detected.": "トラッカー接続エラーが検出されました。", - "Database": "データベース", - "search feeds": "フィードを検索する", - "{feeds} query": "{フィード} お問い合わせ", - "Reload": "再読込", - "Rebuild": "再ビルド", - "No database found": "データベースが見つかりません", - - "Identity address": "Identity address", - "Change": "編集", - - "Site control": "サイト管理", + "Update ZeroNet client to latest version?": "ZeroNetクライアントを最新版に更新しますか?", "Update": "更新", - "Pause": "一時停止", - "Resume": "再開", - "Delete": "削除", - "Are you sure?": "本当によろしいですか?", - - "Site address": "サイトアドレス", - "Donate": "寄付する", - - "Missing files": "ファイルがありません", - "{} try": "{} 試す", - "{} tries": "{} 試行", - "+ {num_bad_files} more": "+ {num_bad_files} more", - - "This is my site": "This is my site", - "Site title": "サイトタイトル", - "Site description": "サイトの説明", - "Save site settings": "サイトの設定を保存する", - - "Content publishing": "コンテンツを公開する", - "Choose": "選択", - "Sign": "Sign", - "Publish": "公開する", - - "This function is disabled on this proxy": "この機能はこのプロキシで無効になっています", - "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Cityデータベースのダウンロードエラー: {}!
    手動でダウンロードして、フォルダに解凍してください。:
    {}", - "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Cityデータベースの読み込み (これは一度だけ行われます, ~20MB)...", - "GeoLite2 City database downloaded!": "GeoLite2 Cityデータベースがダウンロードされました!", - - "Are you sure?": "本当によろしいですか?", - "Site storage limit modified!": "サイトの保存容量の制限が変更されました!", - "Database schema reloaded!": "データベーススキーマがリロードされました!", - "Database rebuilding....": "データベースの再構築中....", - "Database rebuilt!": "データベースが再構築されました!", - "Site updated!": "サイトが更新されました!", - "Delete this site": "このサイトを削除する", - "File write error: ": "ファイル書き込みエラー:", - "Site settings saved!": "サイト設定が保存されました!", - "Enter your private key:": "秘密鍵を入力してください:", - " Signed!": " Signed!", - "WebGL not supported": "WebGLはサポートされていません" -} \ No newline at end of file + "Restart ZeroNet client?": "ZeroNetクライアントを再起動しますか?", + "Restart": "再起動", + "Shut down ZeroNet client?": "ZeroNetクライアントを終了しますか?", + "Shut down": "終了" +} diff --git a/src/Translate/languages/pl.json b/src/Translate/languages/pl.json index 75caeceb..679e909d 100644 --- a/src/Translate/languages/pl.json +++ b/src/Translate/languages/pl.json @@ -13,8 +13,8 @@ "Content signing failed": "Podpisanie treści zawiodło", "Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).", - "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników równorzednych.", - "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników równorzędnych, ale twoja treść jest dostępna.", + "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników.", + "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników, ale twoja treść jest dostępna.", "Your network connection is restricted. Please, open {0} port": "Twoje połączenie sieciowe jest ograniczone. Proszę, otwórz port {0}", "on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogłabyć dostępna dla wszystkich.", "Content publish failed.": "Publikacja treści zawiodła.", @@ -39,13 +39,16 @@ " files needs to be downloaded": " pliki muszą zostać ściągnięte", " downloaded": " ściągnięte", " download failed": " ściąganie nie powiodło się", - "Peers found: ": "Odnaleziono użytkowników równorzednych: ", - "No peers found": "Nie odnaleziono użytkowników równorzędnych", + "Peers found: ": "Odnaleziono użytkowników: ", + "No peers found": "Nie odnaleziono użytkowników", "Running out of size limit (": "Limit rozmiaru na wyczerpaniu (", "Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBów", "Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBów", " New version of this page has just released.
    Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana.
    Odśwież by zobaczyć nową, zmodyfikowaną treść strony.", "This site requests permission:": "Ta strona wymaga uprawnień:", - "_(Accept)": "Przyznaj uprawnienia" + "_(Accept)": "Przyznaj uprawnienia", + "Sign and publish": "Podpisz i opublikuj", + "Restart ZeroNet client?": "Uruchomić ponownie klienta ZeroNet?", + "Restart": "Uruchom ponownie" } diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index 1a2f4b2a..4a4e0545 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -3,7 +3,9 @@ import re import os import mimetypes import json -import cgi +import html +import urllib +import socket import gevent @@ -24,6 +26,23 @@ status_texts = { 500: "500 Internal Server Error", } +content_types = { + "asc": "application/pgp-keys", + "css": "text/css", + "gpg": "application/pgp-encrypted", + "html": "text/html", + "js": "application/javascript", + "json": "application/json", + "oga": "audio/ogg", + "ogg": "application/ogg", + "ogv": "video/ogg", + "sig": "application/pgp-signature", + "txt": "text/plain", + "webmanifest": "application/manifest+json", + "wasm": "application/wasm", + "webp": "image/webp" +} + class SecurityError(Exception): pass @@ -67,13 +86,19 @@ class UiRequest(object): return True if self.isProxyRequest(): # Support for chrome extension proxy - if self.server.site_manager.isDomain(host): + if self.isDomain(host): return True else: return False return False + def isDomain(self, address): + return self.server.site_manager.isDomainCached(address) + + def resolveDomain(self, domain): + return self.server.site_manager.resolveDomainCached(domain) + # Call the request handler function base on path def route(self, path): # Restict Ui access by ip @@ -82,10 +107,25 @@ class UiRequest(object): # Check if host allowed to do request if not self.isHostAllowed(self.env.get("HTTP_HOST")): - return self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False) + ret_error = next(self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False)) + + http_get = self.env["PATH_INFO"] + if self.env["QUERY_STRING"]: + http_get += "?{0}".format(self.env["QUERY_STRING"]) + self_host = self.env["HTTP_HOST"].split(":")[0] + self_ip = self.env["HTTP_HOST"].replace(self_host, socket.gethostbyname(self_host)) + link = "http://{0}{1}".format(self_ip, http_get) + ret_body = """ +

Start the client with --ui_host "{host}" argument + or access via ip: {link}
    + """.format( + host=html.escape(self.env["HTTP_HOST"]), + link=html.escape(link) + ).encode("utf8") + return iter([ret_error, ret_body]) # Prepend .bit host for transparent proxy - if self.server.site_manager.isDomain(self.env.get("HTTP_HOST")): + if self.isDomain(self.env.get("HTTP_HOST")): path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path) path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access @@ -108,8 +148,8 @@ class UiRequest(object): if path == "/": return self.actionIndex() - elif path == "/favicon.ico": - return self.actionFile("src/Ui/media/img/favicon.ico") + elif path in ("/favicon.ico", "/apple-touch-icon.png"): + return self.actionFile("src/Ui/media/img/%s" % path) # Internal functions elif "/ZeroNet-Internal/" in path: path = re.sub(".*?/ZeroNet-Internal/", "/", path) @@ -157,11 +197,12 @@ class UiRequest(object): if func: return func() else: - return self.error404(path) + ret = self.error404(path) + return ret # The request is proxied by chrome extension or a transparent proxy def isProxyRequest(self): - return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.server.site_manager.isDomain(self.env.get("HTTP_HOST"))) + return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.isDomain(self.env.get("HTTP_HOST"))) def isWebSocketRequest(self): return self.env.get("HTTP_UPGRADE") == "websocket" @@ -171,26 +212,25 @@ class UiRequest(object): # Get mime by filename def getContentType(self, file_name): - content_type = mimetypes.guess_type(file_name)[0] + file_name = file_name.lower() + ext = file_name.rsplit(".", 1)[-1] - if content_type: - content_type = content_type.lower() - - if file_name.endswith(".css"): # Force correct css content type - content_type = "text/css" + if ext in content_types: + content_type = content_types[ext] + elif ext in ("ttf", "woff", "otf", "woff2", "eot", "sfnt", "collection"): + content_type = "font/%s" % ext + else: + content_type = mimetypes.guess_type(file_name)[0] if not content_type: - if file_name.endswith(".json"): # Correct json header - content_type = "application/json" - else: - content_type = "application/octet-stream" + content_type = "application/octet-stream" - return content_type + return content_type.lower() # Return: Posted variables def getPosted(self): if self.env['REQUEST_METHOD'] == "POST": - return dict(cgi.parse_qsl( + return dict(urllib.parse.parse_qsl( self.env['wsgi.input'].readline().decode() )) else: @@ -200,7 +240,7 @@ class UiRequest(object): def getCookies(self): raw_cookies = self.env.get('HTTP_COOKIE') if raw_cookies: - cookies = cgi.parse_qsl(raw_cookies) + cookies = urllib.parse.parse_qsl(raw_cookies) return {key.strip(): val for key, val in cookies} else: return {} @@ -246,19 +286,13 @@ class UiRequest(object): headers["Connection"] = "Keep-Alive" headers["Keep-Alive"] = "max=25, timeout=30" headers["X-Frame-Options"] = "SAMEORIGIN" - is_referer_allowed = False - if self.env.get("HTTP_REFERER"): - if self.isSameOrigin(self.getReferer(), self.getRequestUrl()): - is_referer_allowed = True - elif self.getReferer() == "%s://%s/" % (self.env["wsgi.url_scheme"], self.env["HTTP_HOST"]): # Origin-only referer - is_referer_allowed = True - if content_type != "text/html" and is_referer_allowed: + if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), 
self.getRequestUrl()): headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css if noscript: - headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src 'self'; font-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline';" + headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';" elif script_nonce and self.isScriptNonceSupported(): - headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self'; style-src 'self' 'unsafe-inline'; connect-src *; frame-src 'self'".format(script_nonce) + headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self' blob: data:; style-src 'self' blob: 'unsafe-inline'; connect-src *; frame-src 'self' blob:".format(script_nonce) if allow_ajax: headers["Access-Control-Allow-Origin"] = "null" @@ -268,47 +302,79 @@ class UiRequest(object): headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range" headers["Access-Control-Allow-Credentials"] = "true" - if content_type == "text/html": - content_type = "text/html; charset=utf-8" - if content_type == "text/plain": - content_type = "text/plain; charset=utf-8" - # Download instead of display file types that can be dangerous if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type): headers["Content-Disposition"] = "attachment" cacheable_type = ( - content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or - self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript" + self.env["REQUEST_METHOD"] == "OPTIONS" or + content_type.split("/", 1)[0] in ("image", "video", "font") or + content_type in ("application/javascript", "text/css") ) + if content_type in ("text/plain", "text/html", "text/css", "application/javascript", "application/json", "application/manifest+json"): + content_type += "; charset=utf-8" + if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min headers["Cache-Control"] = "public, max-age=600" # Cache 10 min else: headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all headers["Content-Type"] = content_type headers.update(extra_headers) - return self.start_response(status_texts[status], headers.items()) + return self.start_response(status_texts[status], list(headers.items())) # Renders a template def render(self, template_path, *args, **kwargs): - template = open(template_path).read() + template = open(template_path, encoding="utf8").read() + def renderReplacer(m): - return "%s" % kwargs.get(m.group(1), "") + if m.group(1) in kwargs: + return "%s" % kwargs.get(m.group(1), "") + else: + return m.group(0) template_rendered = re.sub("{(.*?)}", renderReplacer, template) return template_rendered.encode("utf8") + def isWrapperNecessary(self, path): + match = re.match(r"/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) + + if not match: + return True + + inner_path = match.group("inner_path").lstrip("/") + if not inner_path or path.endswith("/"): # It's a directory + content_type = self.getContentType("index.html") + else: # It's a file + content_type = self.getContentType(inner_path) + + is_html_file = "html" in content_type or "xhtml" in content_type + + return is_html_file + + @helper.encodeResponse + def formatRedirect(self, url): + return """ + + + Redirecting to {0} + + + + """.format(html.escape(url)) + # - Actions - # Redirect to an url def actionRedirect(self, url): self.start_response('301 Redirect', [('Location', str(url))]) - yield "Location changed: %s" % url + yield self.formatRedirect(url) def actionIndex(self): - return self.actionRedirect("/" + config.homepage) + return self.actionRedirect("/" + config.homepage + "/") # Render a file from media with iframe site wrapper def actionWrapper(self, path, extra_headers=None): @@ -316,20 +382,13 @@ class UiRequest(object): extra_headers = {} script_nonce = self.getScriptNonce() - match = re.match("/(?P
<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path) + match = re.match(r"/(?P<address>
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) just_added = False if match: address = match.group("address") inner_path = match.group("inner_path").lstrip("/") - if not inner_path or path.endswith("/"): # It's a directory - content_type = self.getContentType("index.html") - else: # It's a file - content_type = self.getContentType(inner_path) - - is_html_file = "html" in content_type or "xhtml" in content_type - - if not is_html_file: + if not self.isWrapperNecessary(path): return self.actionSiteMedia("/media" + path) # Serve non-html files without wrapper if self.isAjaxRequest(): @@ -339,16 +398,13 @@ class UiRequest(object): return self.error403("WebSocket request not allowed to load wrapper") # No websocket if "text/html" not in self.env.get("HTTP_ACCEPT", ""): - return self.error403("Invalid Accept header to load wrapper") + return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", "")) if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""): return self.error403("Prefetch not allowed to load wrapper") site = SiteManager.site_manager.get(address) - if ( - site and site.content_manager.contents.get("content.json") and - (not site.getReachableBadFiles() or site.settings["own"]) - ): # Its downloaded or own + if site and site.content_manager.contents.get("content.json"): title = site.content_manager.contents["content.json"]["title"] else: title = "Loading %s..." % address @@ -367,7 +423,7 @@ class UiRequest(object): self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) min_last_announce = (time.time() - site.announcer.time_last_announce) / 60 - if min_last_announce > 60 and site.settings["serving"] and not just_added: + if min_last_announce > 60 and site.isServing() and not just_added: site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." % min_last_announce) gevent.spawn(site.update, announce=True) @@ -383,6 +439,16 @@ class UiRequest(object): else: return "/" + address + def getWsServerUrl(self): + if self.isProxyRequest(): + if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 + server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] + else: # Remote client, use SERVER_NAME as server's real address + server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) + else: + server_url = "" + return server_url + def processQueryString(self, site, query_string): match = re.search("zeronet_peers=(.*?)(&|$)", query_string) if match: @@ -430,23 +496,23 @@ class UiRequest(object): wrapper_nonce = self.getWrapperNonce() inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", "")) - if inner_query_string: - inner_query_string = "?%s&wrapper_nonce=%s" % (inner_query_string, wrapper_nonce) - elif "?" in inner_path: - inner_query_string = "&wrapper_nonce=%s" % wrapper_nonce + if "?" in inner_path: + sep = "&" else: - inner_query_string = "?wrapper_nonce=%s" % wrapper_nonce + sep = "?" 
+ + if inner_query_string: + inner_query_string = "%s%s&wrapper_nonce=%s" % (sep, inner_query_string, wrapper_nonce) + else: + inner_query_string = "%swrapper_nonce=%s" % (sep, wrapper_nonce) if self.isProxyRequest(): # Its a remote proxy request - if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 - server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] - else: # Remote client, use SERVER_NAME as server's real address - server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) homepage = "http://zero/" + config.homepage else: # Use relative path - server_url = "" homepage = "/" + config.homepage + server_url = self.getWsServerUrl() # Real server url for WS connections + user = self.getCurrentUser() if user: theme = user.settings.get("theme", "light") @@ -459,11 +525,11 @@ class UiRequest(object): content = site.content_manager.contents["content.json"] if content.get("background-color"): background_color = content.get("background-color-%s" % theme, content["background-color"]) - body_style += "background-color: %s;" % cgi.escape(background_color, True) + body_style += "background-color: %s;" % html.escape(background_color) if content.get("viewport"): - meta_tags += '' % cgi.escape(content["viewport"], True) + meta_tags += '' % html.escape(content["viewport"]) if content.get("favicon"): - meta_tags += '' % (root_url, cgi.escape(content["favicon"], True)) + meta_tags += '' % (root_url, html.escape(content["favicon"])) if content.get("postmessage_nonce_security"): postmessage_nonce_security = "true" @@ -474,18 +540,40 @@ class UiRequest(object): if show_loadingscreen is None: show_loadingscreen = not site.storage.isFile(file_inner_path) + + if show_loadingscreen: + meta_tags += ''; + + def xescape(s): + '''combines parts from re.escape & html.escape''' + # https://github.com/python/cpython/blob/3.10/Lib/re.py#L267 + # '&' is handled otherwise + re_chars = {i: '\\' + chr(i) for i in b'()[]{}*+-|^$\\.~# \t\n\r\v\f'} + # https://github.com/python/cpython/blob/3.10/Lib/html/__init__.py#L12 + html_chars = { + '<' : '<', + '>' : '>', + '"' : '"', + "'" : ''', + } + # we can't replace '&' because it makes certain zites work incorrectly + # it should however in no way interfere with re.sub in render + repl = {} + repl.update(re_chars) + repl.update(html_chars) + return s.translate(repl) return self.render( "src/Ui/template/wrapper.html", server_url=server_url, inner_path=inner_path, - file_url=re.escape(file_url), - file_inner_path=re.escape(file_inner_path), + file_url=xescape(file_url), + file_inner_path=xescape(file_inner_path), address=site.address, - title=cgi.escape(title, True), + title=xescape(title), body_style=body_style, meta_tags=meta_tags, - query_string=re.escape(inner_query_string), + query_string=xescape(inner_query_string), wrapper_key=site.settings["wrapper_key"], ajax_key=site.settings["ajax_key"], wrapper_nonce=wrapper_nonce, @@ -521,8 +609,18 @@ class UiRequest(object): def isSameOrigin(self, url_a, url_b): if not url_a or not url_b: return False - origin_a = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_a) - origin_b = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_b) + + url_a = url_a.replace("/raw/", "/") + url_b = url_b.replace("/raw/", "/") + + origin_pattern = "http[s]{0,1}://(.*?/.*?/).*" + is_origin_full = re.match(origin_pattern, url_a) + if not is_origin_full: # Origin looks trimmed to host, require only same host + origin_pattern = "http[s]{0,1}://(.*?/).*" + + origin_a = 
re.sub(origin_pattern, "\\1", url_a) + origin_b = re.sub(origin_pattern, "\\1", url_b) + return origin_a == origin_b # Return {address: 1Site.., inner_path: /data/users.json} from url path @@ -535,9 +633,11 @@ class UiRequest(object): if "../" in path or "./" in path: raise SecurityError("Invalid path") - match = re.match("/media/(?P
    <address>[A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path) + match = re.match(r"/media/(?P<address>
    [A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P/.*|$)", path) if match: path_parts = match.groupdict() + if self.isDomain(path_parts["address"]): + path_parts["address"] = self.resolveDomain(path_parts["address"]) path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites) path_parts["inner_path"] = path_parts["inner_path"].lstrip("/") if not path_parts["inner_path"]: @@ -557,9 +657,10 @@ class UiRequest(object): return self.error404(path) address = path_parts["address"] + file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) - if config.debug and file_path.split("/")[-1].startswith("all."): + if (config.debug or config.merge_media) and file_path.split("/")[-1].startswith("all."): # If debugging merge *.css to all.css and *.js to all.js site = self.server.sites.get(address) if site and site.settings["own"]: @@ -603,7 +704,7 @@ class UiRequest(object): return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) else: self.log.debug("File not found: %s" % path_parts["inner_path"]) - return self.error404(path_parts["inner_path"]) + return self.error404(path) # Serve a media for ui def actionUiMedia(self, path): @@ -611,26 +712,29 @@ class UiRequest(object): if match: # Looks like a valid path file_path = "src/Ui/media/%s" % match.group("inner_path") allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed - if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): + if "../" in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path return self.error403() else: - if config.debug and match.group("inner_path").startswith("all."): + if (config.debug or config.merge_media) and match.group("inner_path").startswith("all."): # If debugging merge *.css to all.css and *.js to all.js from Debug import DebugMedia DebugMedia.merge(file_path) return self.actionFile(file_path, header_length=False) # Dont's send site to allow plugins append content + else: # Bad url return self.error400() def actionSiteAdd(self): - post = dict(cgi.parse_qsl(self.env["wsgi.input"].read())) + post_data = self.env["wsgi.input"].read().decode() + post = dict(urllib.parse.parse_qsl(post_data)) if post["add_nonce"] not in self.server.add_nonces: return self.error403("Add nonce error.") self.server.add_nonces.remove(post["add_nonce"]) SiteManager.site_manager.need(post["address"]) return self.actionRedirect(post["url"]) + @helper.encodeResponse def actionSiteAddPrompt(self, path): path_parts = self.parsePath(path) if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]): @@ -638,15 +742,18 @@ class UiRequest(object): self.sendHeader(200, "text/html", noscript=True) template = open("src/Ui/template/site_add.html").read() - template = template.replace("{url}", cgi.escape(self.env["PATH_INFO"], True)) + template = template.replace("{url}", html.escape(self.env["PATH_INFO"])) template = template.replace("{address}", path_parts["address"]) template = template.replace("{add_nonce}", self.getAddNonce()) return template def replaceHtmlVariables(self, block, path_parts): user = self.getCurrentUser() - themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light")) - block = block.replace("{themeclass}", themeclass.encode("utf8")) + if user and user.settings: + themeclass = "theme-%-6s" % 
re.sub("[^a-z]", "", user.settings.get("theme", "light")) + else: + themeclass = "theme-light" + block = block.replace(b"{themeclass}", themeclass.encode("utf8")) if path_parts: site = self.server.sites.get(path_parts.get("address")) @@ -654,28 +761,33 @@ class UiRequest(object): modified = int(time.time()) else: modified = int(site.content_manager.contents["content.json"]["modified"]) - block = block.replace("{site_modified}", str(modified)) + block = block.replace(b"{site_modified}", str(modified).encode("utf8")) return block # Stream a file to client - def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, file_size=None, file_obj=None, path_parts=None): + def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, extra_headers={}, file_size=None, file_obj=None, path_parts=None): + file_name = os.path.basename(file_path) + if file_size is None: file_size = helper.getFilesize(file_path) if file_size is not None: # Try to figure out content type by extension - content_type = self.getContentType(file_path) + content_type = self.getContentType(file_name) range = self.env.get("HTTP_RANGE") range_start = None - is_html_file = file_path.endswith(".html") + is_html_file = file_name.endswith(".html") if is_html_file: header_length = False if send_header: - extra_headers = {} + extra_headers = extra_headers.copy() + content_encoding = self.get.get("zeronet_content_encoding", "") + if all(part.strip() in ("gzip", "compress", "deflate", "identity", "br") for part in content_encoding.split(",")): + extra_headers["Content-Encoding"] = content_encoding extra_headers["Accept-Ranges"] = "bytes" if header_length: extra_headers["Content-Length"] = str(file_size) @@ -711,7 +823,8 @@ class UiRequest(object): file_obj.close() break else: # File not exists - yield self.error404(file_path) + for part in self.error404(str(file_path)): + yield part # On websocket connection def actionWebsocket(self): @@ -725,52 +838,53 @@ class UiRequest(object): if origin: origin_host = origin.split("://", 1)[-1] if origin_host != host and origin_host not in self.server.allowed_ws_origins: - ws.send(json.dumps({"error": "Invalid origin: %s" % origin})) - return self.error403("Invalid origin: %s" % origin) + error_message = "Invalid origin: %s (host: %s, allowed: %s)" % (origin, host, self.server.allowed_ws_origins) + ws.send(json.dumps({"error": error_message})) + return self.error403(error_message) # Find site by wrapper_key wrapper_key = self.get["wrapper_key"] site = None - for site_check in self.server.sites.values(): + for site_check in list(self.server.sites.values()): if site_check.settings["wrapper_key"] == wrapper_key: site = site_check if site: # Correct wrapper key try: user = self.getCurrentUser() - except Exception, err: - self.log.error("Error in data/user.json: %s" % err) - return self.error500() + except Exception as err: + ws.send(json.dumps({"error": "Error in data/user.json: %s" % err})) + return self.error500("Error in data/user.json: %s" % err) if not user: - self.log.error("No user found") - return self.error403() + ws.send(json.dumps({"error": "No user found"})) + return self.error403("No user found") ui_websocket = UiWebsocket(ws, site, self.server, user, self) site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events self.server.websockets.append(ui_websocket) ui_websocket.start() 
self.server.websockets.remove(ui_websocket) - for site_check in self.server.sites.values(): + for site_check in list(self.server.sites.values()): # Remove websocket from every site (admin sites allowed to join other sites event channels) if ui_websocket in site_check.websockets: site_check.websockets.remove(ui_websocket) - return "Bye." + return [b"Bye."] else: # No site found by wrapper key - self.log.error("Wrapper key not found: %s" % wrapper_key) - return self.error403() + ws.send(json.dumps({"error": "Wrapper key not found: %s" % wrapper_key})) + return self.error403("Wrapper key not found: %s" % wrapper_key) else: self.start_response("400 Bad Request", []) - return "Not a websocket!" + return [b"Not a websocket request!"] # Debug last error def actionDebug(self): # Raise last error from DebugHook - import sys - last_error = sys.modules["main"].DebugHook.last_error + import main + last_error = main.DebugHook.last_error if last_error: - raise last_error[0], last_error[1], last_error[2] + raise last_error[0](last_error[1]).with_traceback(last_error[2]) else: self.sendHeader() - return "No error! :)" + return [b"No error! :)"] # Just raise an error to get console def actionConsole(self): @@ -805,30 +919,33 @@ class UiRequest(object): # Send bad request error def error400(self, message=""): self.sendHeader(400, noscript=True) + self.log.error("Error 400: %s" % message) return self.formatError("Bad Request", message) # You are not allowed to access this def error403(self, message="", details=True): self.sendHeader(403, noscript=True) - self.log.error("Error 403: %s" % message) + self.log.warning("Error 403: %s" % message) return self.formatError("Forbidden", message, details=details) # Send file not found error def error404(self, path=""): self.sendHeader(404, noscript=True) - return self.formatError("Not Found", path.encode("utf8"), details=False) + return self.formatError("Not Found", path, details=False) # Internal server error def error500(self, message=":("): self.sendHeader(500, noscript=True) + self.log.error("Error 500: %s" % message) return self.formatError("Server error", message) + @helper.encodeResponse def formatError(self, title, message, details=True): import sys import gevent if details and config.debug: - details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key} + details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key} details["version_zeronet"] = "%s r%s" % (config.version, config.rev) details["version_python"] = sys.version details["version_gevent"] = gevent.__version__ @@ -845,9 +962,13 @@ class UiRequest(object):

    Please report it if you think this an error.

    <h4>Details:</h4>
    <pre>%s</pre>
    - """ % (title, cgi.escape(message), cgi.escape(json.dumps(details, indent=4, sort_keys=True))) + """ % (title, html.escape(message), html.escape(json.dumps(details, indent=4, sort_keys=True))) else: return """ +
    <h1>%s</h1>
    <h2>%s</h2>
    - """ % (title, cgi.escape(message)) + """ % (title, html.escape(message)) diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py index 03d56162..61943ada 100644 --- a/src/Ui/UiServer.py +++ b/src/Ui/UiServer.py @@ -1,22 +1,21 @@ import logging import time -import cgi +import urllib import socket -import sys import gevent from gevent.pywsgi import WSGIServer -from gevent.pywsgi import WSGIHandler -from lib.geventwebsocket.handler import WebSocketHandler +from lib.gevent_ws import WebSocketHandler -from UiRequest import UiRequest +from .UiRequest import UiRequest from Site import SiteManager from Config import config from Debug import Debug +import importlib # Skip websocket handler if not necessary -class UiWSGIHandler(WSGIHandler): +class UiWSGIHandler(WebSocketHandler): def __init__(self, *args, **kwargs): self.server = args[2] @@ -24,25 +23,25 @@ class UiWSGIHandler(WSGIHandler): self.args = args self.kwargs = kwargs + def handleError(self, err): + if config.debug: # Allow websocket errors to appear on /Debug + import main + main.DebugHook.handleError() + else: + ui_request = UiRequest(self.server, {}, self.environ, self.start_response) + block_gen = ui_request.error500("UiWSGIHandler error: %s" % Debug.formatExceptionMessage(err)) + for block in block_gen: + self.write(block) + def run_application(self): - if "HTTP_UPGRADE" in self.environ: # Websocket request - try: - ws_handler = WebSocketHandler(*self.args, **self.kwargs) - ws_handler.__dict__ = self.__dict__ # Match class variables - ws_handler.run_application() - except Exception, err: - logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err)) - if config.debug: # Allow websocket errors to appear on /Debug - import sys - sys.modules["main"].DebugHook.handleError() - else: # Standard HTTP request - try: - super(UiWSGIHandler, self).run_application() - except Exception, err: - logging.error("UiWSGIHandler error: %s" % Debug.formatException(err)) - if config.debug: # Allow websocket errors to appear on /Debug - import sys - sys.modules["main"].DebugHook.handleError() + err_name = "UiWSGIHandler websocket" if "HTTP_UPGRADE" in self.environ else "UiWSGIHandler" + try: + super(UiWSGIHandler, self).run_application() + except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as err: + logging.warning("%s connection error: %s" % (err_name, err)) + except Exception as err: + logging.warning("%s error: %s" % (err_name, Debug.formatException(err))) + self.handleError(err) def handle(self): # Save socket to be able to close them properly on exit @@ -52,14 +51,15 @@ class UiWSGIHandler(WSGIHandler): class UiServer: - def __init__(self): self.ip = config.ui_ip self.port = config.ui_port + self.running = False if self.ip == "*": self.ip = "0.0.0.0" # Bind all if config.ui_host: self.allowed_hosts = set(config.ui_host) + #TODO: For proxies allow sub domains(www) as valid hosts, should be user preference. elif config.ui_ip == "127.0.0.1": # IP Addresses are inherently allowed as they are immune to DNS # rebinding attacks. 
@@ -74,8 +74,8 @@ class UiServer: self.allowed_hosts.update(["localhost"]) else: self.allowed_hosts = set([]) - self.allow_trans_proxy = config.ui_trans_proxy self.allowed_ws_origins = set() + self.allow_trans_proxy = config.ui_trans_proxy self.wrapper_nonces = [] self.add_nonces = [] @@ -83,6 +83,10 @@ class UiServer: self.site_manager = SiteManager.site_manager self.sites = SiteManager.site_manager.list() self.log = logging.getLogger(__name__) + config.error_logger.onNewRecord = self.handleErrorLogRecord + + def handleErrorLogRecord(self, record): + self.updateWebsocket(log_event=record.levelname) # After WebUI started def afterStarted(self): @@ -91,9 +95,9 @@ class UiServer: # Handle WSGI request def handleRequest(self, env, start_response): - path = env["PATH_INFO"] + path = bytes(env["PATH_INFO"], "raw-unicode-escape").decode("utf8") if env.get("QUERY_STRING"): - get = dict(cgi.parse_qsl(env['QUERY_STRING'])) + get = dict(urllib.parse.parse_qsl(env['QUERY_STRING'])) else: get = {} ui_request = UiRequest(self, get, env, start_response) @@ -102,7 +106,7 @@ class UiServer: else: # Catch and display the error try: return ui_request.route(path) - except Exception, err: + except Exception as err: logging.debug("UiRequest error: %s" % Debug.formatException(err)) return ui_request.error500("Err: %s" % Debug.formatException(err)) @@ -111,30 +115,34 @@ class UiServer: global UiRequest import imp import sys - reload(sys.modules["User.UserManager"]) - reload(sys.modules["Ui.UiWebsocket"]) + importlib.reload(sys.modules["User.UserManager"]) + importlib.reload(sys.modules["Ui.UiWebsocket"]) UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest # UiRequest.reload() # Bind and run the server def start(self): + self.running = True handler = self.handleRequest if config.debug: # Auto reload UiRequest on change from Debug import DebugReloader - DebugReloader(self.reload) + DebugReloader.watcher.addCallback(self.reload) # Werkzeug Debugger try: from werkzeug.debug import DebuggedApplication handler = DebuggedApplication(self.handleRequest, evalex=True) - except Exception, err: + except Exception as err: self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err) from Debug import DebugReloader self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log self.log.info("--------------------------------------") - self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) + if ":" in config.ui_ip: + self.log.info("Web interface: http://[%s]:%s/" % (config.ui_ip, config.ui_port)) + else: + self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) self.log.info("--------------------------------------") if config.open_browser and config.open_browser != "False": @@ -148,42 +156,52 @@ class UiServer: url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage) gevent.spawn_later(0.3, browser.open, url, new=2) except Exception as err: - print "Error starting browser: %s" % err + print("Error starting browser: %s" % err) self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log) self.server.sockets = {} self.afterStarted() try: self.server.serve_forever() - except Exception, err: + except Exception as err: self.log.error("Web interface bind error, must be running already, exiting.... 
%s" % err) - sys.modules["main"].file_server.stop() + import main + main.file_server.stop() self.log.debug("Stopped.") def stop(self): self.log.debug("Stopping...") # Close WS sockets if "clients" in dir(self.server): - for client in self.server.clients.values(): + for client in list(self.server.clients.values()): client.ws.close() # Close http sockets sock_closed = 0 - for sock in self.server.sockets.values(): + for sock in list(self.server.sockets.values()): try: - sock.send("bye") + sock.send(b"bye") sock.shutdown(socket.SHUT_RDWR) # sock._sock.close() # sock.close() sock_closed += 1 - except Exception, err: + except Exception as err: self.log.debug("Http connection close error: %s" % err) self.log.debug("Socket closed: %s" % sock_closed) time.sleep(0.1) + if config.debug: + from Debug import DebugReloader + DebugReloader.watcher.stop() self.server.socket.close() self.server.stop() + self.running = False time.sleep(1) def updateWebsocket(self, **kwargs): + if kwargs: + param = {"event": list(kwargs.items())[0]} + else: + param = None + for ws in self.websockets: - ws.event("serverChanged", kwargs) + ws.event("serverChanged", param) diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 580ff2af..2f982e1d 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -6,29 +6,25 @@ import shutil import re import copy import logging +import stat import gevent from Config import config from Site import SiteManager +from Crypt import CryptBitcoin from Debug import Debug from util import QueryJson, RateLimit from Plugin import PluginManager from Translate import translate as _ from util import helper from util import SafeRe +from util.Flag import flag from Content.ContentManager import VerifyError, SignError @PluginManager.acceptPlugins class UiWebsocket(object): - admin_commands = set([ - "sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteAdd", "siteListModifiedFiles", "siteSetSettingsValue", - "channelJoinAllsite", "serverUpdate", "serverPortcheck", "serverShutdown", "serverShowdirectory", "serverGetWrapperNonce", - "certSet", "certList", "configSet", "permissionAdd", "permissionRemove", "announcerStats", "userSetGlobalSettings" - ]) - async_commands = set(["fileGet", "fileList", "dirList", "fileNeed", "serverPortcheck", "siteListModifiedFiles"]) - def __init__(self, ws, site, server, user, request): self.ws = ws self.site = site @@ -49,13 +45,14 @@ class UiWebsocket(object): if self.site.address == config.homepage and not self.site.page_requested: # Add open fileserver port message or closed port error to homepage at first request after start self.site.page_requested = True # Dont add connection notification anymore - file_server = sys.modules["main"].file_server + import main + file_server = main.file_server if not file_server.port_opened or file_server.tor_manager.start_onions is None: self.site.page_requested = False # Not ready yet, check next time else: try: self.addHomepageNotifications() - except Exception, err: + except Exception as err: self.log.error("Uncaught Exception: " + Debug.formatException(err)) for notification in self.site.notifications: # Send pending notification messages @@ -73,7 +70,7 @@ class UiWebsocket(object): break else: message = ws.receive() - except Exception, err: + except Exception as err: self.log.error("WebSocket receive error: %s" % Debug.formatException(err)) break @@ -81,13 +78,19 @@ class UiWebsocket(object): try: req = json.loads(message) self.handleRequest(req) - except Exception, err: + except Exception as err: 
if config.debug: # Allow websocket errors to appear on /Debug - sys.modules["main"].DebugHook.handleError() + import main + main.DebugHook.handleError() self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message)) if not self.hasPlugin("Multiuser"): self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) + self.onClosed() + + def onClosed(self): + pass + def dedent(self, text): return re.sub("[\\r\\n\\x20\\t]+", " ", text.strip().replace("
    ", " ")) @@ -99,63 +102,18 @@ class UiWebsocket(object): if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist): self.site.notifications.append([ "error", - _(u"You are not going to set up a public gateway. However, your Web UI is
    " + + _("You are not going to set up a public gateway. However, your Web UI is
    " + "open to the whole Internet.
    " + "Please check your configuration.") ]) - file_server = sys.modules["main"].file_server - if any(file_server.port_opened.values()): - self.site.notifications.append([ - "done", - _["Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!"].format(config.fileserver_port), - 10000 - ]) - elif config.tor == "always" and file_server.tor_manager.start_onions: - self.site.notifications.append([ - "done", - _(u""" - {_[Tor mode active, every connection using Onion route.]}
    - {_[Successfully started Tor onion hidden services.]} - """), - 10000 - ]) - elif config.tor == "always" and file_server.tor_manager.start_onions is not False: - self.site.notifications.append([ - "error", - _(u""" - {_[Tor mode active, every connection using Onion route.]}
    - {_[Unable to start hidden services, please check your config.]} - """), - 0 - ]) - elif file_server.tor_manager.start_onions: - self.site.notifications.append([ - "done", - _(u""" - {_[Successfully started Tor onion hidden services.]}
    - {_[For faster connections open {0} port on your router.]} - """).format(config.fileserver_port), - 10000 - ]) - else: - self.site.notifications.append([ - "error", - _(u""" - {_[Your connection is restricted. Please, open {0} port on your router]}
    - {_[or configure Tor to become a full member of the ZeroNet network.]} - """).format(config.fileserver_port), - 0 - ]) - def hasPlugin(self, name): return name in PluginManager.plugin_manager.plugin_names # Has permission to run the command def hasCmdPermission(self, cmd): - cmd = cmd[0].lower() + cmd[1:] - - if cmd in self.admin_commands and "ADMIN" not in self.permissions: + flags = flag.db.get(self.getCmdFuncName(cmd), ()) + if "admin" in flags and "ADMIN" not in self.permissions: return False else: return True @@ -182,6 +140,8 @@ class UiWebsocket(object): self.cmd("setSiteInfo", site_info) elif channel == "serverChanged": server_info = self.formatServerInfo() + if len(params) > 0 and params[0]: # Extra data + server_info.update(params[0]) self.cmd("setServerInfo", server_info) elif channel == "announcerChanged": site = params[0] @@ -213,7 +173,7 @@ class UiWebsocket(object): message = self.send_queue.pop(0) self.ws.send(json.dumps(message)) self.state["sending"] = False - except Exception, err: + except Exception as err: self.log.debug("Websocket send error: %s" % Debug.formatException(err)) self.state["sending"] = False @@ -230,9 +190,10 @@ class UiWebsocket(object): result = func(*args, **kwargs) if result is not None: self.response(args[0], result) - except Exception, err: + except Exception as err: if config.debug: # Allow websocket errors to appear on /Debug - sys.modules["main"].DebugHook.handleError() + import main + main.DebugHook.handleError() self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) @@ -240,6 +201,10 @@ class UiWebsocket(object): gevent.spawn(asyncErrorWatcher, func, *args, **kwargs) return wrapper + def getCmdFuncName(self, cmd): + func_name = "action" + cmd[0].upper() + cmd[1:] + return func_name + # Handle incoming messages def handleRequest(self, req): @@ -249,17 +214,21 @@ class UiWebsocket(object): if cmd == "response": # It's a response to a command return self.actionResponse(req["to"], req["result"]) - elif not self.hasCmdPermission(cmd): # Admin commands - return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) else: # Normal command - func_name = "action" + cmd[0].upper() + cmd[1:] + func_name = self.getCmdFuncName(cmd) func = getattr(self, func_name, None) + if self.site.settings.get("deleting"): + return self.response(req["id"], {"error": "Site is deleting"}) + if not func: # Unknown command - self.response(req["id"], {"error": "Unknown command: %s" % cmd}) - return + return self.response(req["id"], {"error": "Unknown command: %s" % cmd}) + + if not self.hasCmdPermission(cmd): # Admin commands + return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) # Execute in parallel - if cmd in self.async_commands: + func_flags = flag.db.get(self.getCmdFuncName(cmd), ()) + if func_flags and "async_run" in func_flags: func = self.asyncWrapper(func) # Support calling as named, unnamed parameters and raw first argument too @@ -292,13 +261,13 @@ class UiWebsocket(object): settings = site.settings.copy() del settings["wrapper_key"] # Dont expose wrapper key - del settings["auth_key"] # Dont send auth key twice ret = { - "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed "auth_address": self.user.getAuthAddress(site.address, create=create_user), "cert_user_id": self.user.getCertUserId(site.address), "address": site.address, + "address_short": site.address_short, + 
"address_hash": site.address_hash.hex(), "settings": settings, "content_updated": site.content_updated, "bad_files": len(site.bad_files), @@ -312,17 +281,18 @@ class UiWebsocket(object): } if site.settings["own"]: ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey")) - if site.settings["serving"] and content: + if site.isServing() and content: ret["peers"] += 1 # Add myself if serving return ret def formatServerInfo(self): - file_server = sys.modules["main"].file_server + import main + file_server = main.file_server if file_server.port_opened == {}: ip_external = None else: ip_external = any(file_server.port_opened.values()) - return { + back = { "ip_external": ip_external, "port_opened": file_server.port_opened, "platform": sys.platform, @@ -339,9 +309,16 @@ class UiWebsocket(object): "timecorrection": file_server.timecorrection, "language": config.language, "debug": config.debug, + "offline": config.offline, "plugins": PluginManager.plugin_manager.plugin_names, + "plugins_rev": PluginManager.plugin_manager.plugins_rev, "user_settings": self.user.settings } + if "ADMIN" in self.site.settings["permissions"]: + back["updatesite"] = config.updatesite + back["dist_type"] = config.dist_type + back["lib_verify_best"] = CryptBitcoin.lib_verify_best + return back def formatAnnouncerInfo(self, site): return {"address": site.address, "stats": site.announcer.stats} @@ -350,7 +327,10 @@ class UiWebsocket(object): def actionAs(self, to, address, cmd, params=[]): if not self.hasSitePermission(address, cmd=cmd): + #TODO! Return this as error ? return self.response(to, "No permission for site %s" % address) + if not self.server.sites.get(address): + return self.response(to, {"error": "Site Does Not Exist: %s" % address}) req_self = copy.copy(self) req_self.site = self.server.sites.get(address) req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site @@ -377,6 +357,9 @@ class UiWebsocket(object): ret["event"] = ("file_done", file_status) self.response(to, ret) + def actionSiteBadFiles(self, to): + return list(self.site.bad_files.keys()) + # Join to an event channel def actionChannelJoin(self, to, channels): if type(channels) != list: @@ -386,12 +369,15 @@ class UiWebsocket(object): if channel not in self.channels: self.channels.append(channel) + self.response(to, "ok") + # Server variables def actionServerInfo(self, to): back = self.formatServerInfo() self.response(to, back) # Create a new wrapper nonce that allows to load html file + @flag.admin def actionServerGetWrapperNonce(self, to): wrapper_nonce = self.request.getWrapperNonce() self.response(to, wrapper_nonce) @@ -400,17 +386,18 @@ class UiWebsocket(object): back = self.formatAnnouncerInfo(self.site) self.response(to, back) + @flag.admin def actionAnnouncerStats(self, to): back = {} trackers = self.site.announcer.getTrackers() - for site in self.server.sites.values(): - for tracker, stats in site.announcer.stats.iteritems(): + for site in list(self.server.sites.values()): + for tracker, stats in site.announcer.stats.items(): if tracker not in trackers: continue if tracker not in back: back[tracker] = {} is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"]) - for key, val in stats.iteritems(): + for key, val in stats.items(): if key.startswith("num_"): back[tracker][key] = back[tracker].get(key, 0) + val elif is_latest_data: @@ -435,10 +422,15 @@ class UiWebsocket(object): is_user_content = file_info and ("cert_signers" 
in file_info or "cert_signers_pattern" in file_info) if is_user_content and privatekey is None: cert = self.user.getCert(self.site.address) - extend["cert_auth_type"] = cert["auth_type"] - extend["cert_user_id"] = self.user.getCertUserId(site.address) - extend["cert_sign"] = cert["cert_sign"] - self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"]) + if not cert: + error = "Site sign failed: No certificate selected for Site: %s, Hence Signing inner_path: %s Failed, Try Adding/Selecting User Cert via Site Login" % (self.site.address, inner_path) + self.log.error(error) + return self.response(to, {"error": error}) + else: + extend["cert_auth_type"] = cert["auth_type"] + extend["cert_user_id"] = self.user.getCertUserId(site.address) + extend["cert_sign"] = cert["cert_sign"] + self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"]) if not self.hasFilePermission(inner_path): self.log.error("SiteSign error: you don't own this site & site owner doesn't allow you to do so.") @@ -446,6 +438,11 @@ class UiWebsocket(object): if privatekey == "stored": # Get privatekey from sites.json privatekey = self.user.getSiteData(self.site.address).get("privatekey") + if not privatekey: + self.cmd("notification", ["error", _["Content signing failed"] + "
    Private key not found in sites.json "]) + self.response(to, {"error": "Site sign failed: Private key not stored."}) + self.log.error("Site sign failed: %s: Private key not stored in sites.json" % inner_path) + return if not privatekey: # Get privatekey from users.json auth_address privatekey = self.user.getAuthPrivatekey(self.site.address) @@ -521,7 +518,7 @@ class UiWebsocket(object): progress ]) diffs = site.content_manager.getDiffs(inner_path) - back = site.publish(limit=5, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) + back = site.publish(limit=10, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) if back == 0: # Failed to publish to anyone self.cmd("progress", ["publish", _["Content publish failed."], -100]) else: @@ -539,7 +536,8 @@ class UiWebsocket(object): self.response(to, "ok") else: if len(site.peers) == 0: - if any(sys.modules["main"].file_server.port_opened.values()) or sys.modules["main"].file_server.tor_manager.start_onions: + import main + if any(main.file_server.port_opened.values()) or main.file_server.tor_manager.start_onions: if notification: self.cmd("notification", ["info", _["No peers found, but your content is ready to access."]]) if callback: @@ -548,7 +546,7 @@ class UiWebsocket(object): if notification: self.cmd("notification", [ "info", - _(u"""{_[Your network connection is restricted. Please, open {0} port]}
    + _("""{_[Your network connection is restricted. Please, open {0} port]}
    {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port) ]) if callback: @@ -580,7 +578,7 @@ class UiWebsocket(object): self.cmd( "confirm", [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]], - lambda (res): self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) + lambda res: self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) ) return False @@ -601,7 +599,7 @@ class UiWebsocket(object): shutil.copyfileobj(f_old, f_new) self.site.storage.write(inner_path, content) - except Exception, err: + except Exception as err: self.log.error("File write error: %s" % Debug.formatException(err)) return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)}) @@ -636,9 +634,9 @@ class UiWebsocket(object): if need_delete: try: self.site.storage.delete(inner_path) - except Exception, err: + except Exception as err: self.log.error("File delete error: %s" % err) - return self.response(to, {"error": "Delete error: %s" % err}) + return self.response(to, {"error": "Delete error: %s" % Debug.formatExceptionMessage(err)}) self.response(to, "ok") @@ -656,18 +654,32 @@ class UiWebsocket(object): return self.response(to, rows) # List files in directory + @flag.async_run def actionFileList(self, to, inner_path): try: return list(self.site.storage.walk(inner_path)) except Exception as err: - return {"error": str(err)} + self.log.error("fileList %s error: %s" % (inner_path, Debug.formatException(err))) + return {"error": Debug.formatExceptionMessage(err)} # List directories in a directory - def actionDirList(self, to, inner_path): + @flag.async_run + def actionDirList(self, to, inner_path, stats=False): try: - return list(self.site.storage.list(inner_path)) + if stats: + back = [] + for file_name in self.site.storage.list(inner_path): + file_stats = os.stat(self.site.storage.getPath(inner_path + "/" + file_name)) + is_dir = stat.S_ISDIR(file_stats.st_mode) + back.append( + {"name": file_name, "size": file_stats.st_size, "is_dir": is_dir} + ) + return back + else: + return list(self.site.storage.list(inner_path)) except Exception as err: - return {"error": str(err)} + self.log.error("dirList %s error: %s" % (inner_path, Debug.formatException(err))) + return {"error": Debug.formatExceptionMessage(err)} # Sql query def actionDbQuery(self, to, query, params=None, wait_for=None): @@ -676,9 +688,9 @@ class UiWebsocket(object): rows = [] try: res = self.site.storage.query(query, params) - except Exception, err: # Response the error to client - self.log.error("DbQuery error: %s" % err) - return self.response(to, {"error": str(err)}) + except Exception as err: # Response the error to client + self.log.error("DbQuery error: %s" % Debug.formatException(err)) + return self.response(to, {"error": Debug.formatExceptionMessage(err)}) # Convert result to dict for row in res: rows.append(dict(row)) @@ -687,26 +699,36 @@ class UiWebsocket(object): return self.response(to, rows) # Return file content - def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300): + @flag.async_run + def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300, priority=6): try: if required or inner_path in self.site.bad_files: with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=6) + self.site.needFile(inner_path, priority=priority) body = self.site.storage.read(inner_path, "rb") - except Exception, err: - 
self.log.error("%s fileGet error: %s" % (inner_path, err)) + except (Exception, gevent.Timeout) as err: + self.log.debug("%s fileGet error: %s" % (inner_path, Debug.formatException(err))) body = None - if body and format == "base64": + + if not body: + body = None + elif format == "base64": import base64 - body = base64.b64encode(body) + body = base64.b64encode(body).decode() + else: + try: + body = body.decode() + except Exception as err: + self.response(to, {"error": "Error decoding text: %s" % err}) self.response(to, body) - def actionFileNeed(self, to, inner_path, timeout=300): + @flag.async_run + def actionFileNeed(self, to, inner_path, timeout=300, priority=6): try: with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=6) - except Exception, err: - return self.response(to, {"error": str(err)}) + self.site.needFile(inner_path, priority=priority) + except (Exception, gevent.Timeout) as err: + return self.response(to, {"error": Debug.formatExceptionMessage(err)}) return self.response(to, "ok") def actionFileRules(self, to, inner_path, use_my_cert=False, content=None): @@ -725,7 +747,7 @@ class UiWebsocket(object): rules = self.site.content_manager.getRules(inner_path, content) if inner_path.endswith("content.json") and rules: if content: - rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content.get("files", {}).values()]) + rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in list(content.get("files", {}).values())]) else: rules["current_size"] = 0 return self.response(to, rules) @@ -749,11 +771,11 @@ class UiWebsocket(object): self.cmd( "confirm", [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")], - lambda (res): self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert) + lambda res: self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert) ) else: self.response(to, "Not changed") - except Exception, err: + except Exception as err: self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err))) self.response(to, {"error": err.message}) @@ -781,7 +803,7 @@ class UiWebsocket(object): if not accepted_domains and not accepted_pattern: # Accept any if no filter defined accept_any = True - for domain, cert in self.user.certs.items(): + for domain, cert in list(self.user.certs.items()): if auth_address == cert["auth_address"] and domain == site_data.get("cert"): active = domain title = cert["auth_user_name"] + "@" + domain @@ -797,7 +819,7 @@ class UiWebsocket(object): for domain, account, css_class in accounts: if domain == active: css_class += " active" # Currently selected option - title = _(u"%s ({_[currently selected]})") % account + title = _("%s ({_[currently selected]})") % account else: title = "%s" % account body += "%s" % (css_class, domain, title) @@ -807,7 +829,7 @@ class UiWebsocket(object): # body+= "Accepted authorization providers by the site:" body += "
    " for domain in more_domains: - body += _(u""" + body += _(""" {_[Register]} »{domain} @@ -827,6 +849,7 @@ class UiWebsocket(object): # - Admin actions - + @flag.admin def actionPermissionAdd(self, to, permission): if permission not in self.site.settings["permissions"]: self.site.settings["permissions"].append(permission) @@ -834,31 +857,37 @@ class UiWebsocket(object): self.site.updateWebsocket(permission_added=permission) self.response(to, "ok") + @flag.admin def actionPermissionRemove(self, to, permission): self.site.settings["permissions"].remove(permission) self.site.saveSettings() self.site.updateWebsocket(permission_removed=permission) self.response(to, "ok") + @flag.admin def actionPermissionDetails(self, to, permission): if permission == "ADMIN": self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") elif permission == "NOSANDBOX": self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") + elif permission == "PushNotification": + self.response(to, _["Send notifications"]) else: self.response(to, "") # Set certificate that used for authenticate user for site + @flag.admin def actionCertSet(self, to, domain): self.user.setCert(self.site.address, domain) self.site.updateWebsocket(cert_changed=domain) self.response(to, "ok") # List user's certificates + @flag.admin def actionCertList(self, to): back = [] auth_address = self.user.getAuthAddress(self.site.address) - for domain, cert in self.user.certs.items(): + for domain, cert in list(self.user.certs.items()): back.append({ "auth_address": cert["auth_address"], "auth_type": cert["auth_type"], @@ -869,24 +898,27 @@ class UiWebsocket(object): return back # List all site info + @flag.admin def actionSiteList(self, to, connecting_sites=False): ret = [] - SiteManager.site_manager.load() # Reload sites - for site in self.server.sites.values(): + for site in list(self.server.sites.values()): if not site.content_manager.contents.get("content.json") and not connecting_sites: continue # Incomplete site ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing self.response(to, ret) # Join to an event channel on all sites + @flag.admin def actionChannelJoinAllsite(self, to, channel): if channel not in self.channels: # Add channel to channels self.channels.append(channel) - for site in self.server.sites.values(): # Add websocket to every channel + for site in list(self.server.sites.values()): # Add websocket to every channel if self not in site.websockets: site.websockets.append(self) + self.response(to, "ok") + # Update site content.json def actionSiteUpdate(self, to, address, check_files=False, since=None, announce=False): def updateThread(): @@ -904,6 +936,7 @@ class UiWebsocket(object): self.response(to, {"error": "Unknown site: %s" % address}) # Pause site serving + @flag.admin def actionSitePause(self, to, address): site = self.server.sites.get(address) if site: @@ -916,6 +949,7 @@ class UiWebsocket(object): self.response(to, {"error": "Unknown site: %s" % address}) # Resume site serving + @flag.admin def actionSiteResume(self, to, address): site = self.server.sites.get(address) if site: @@ -928,6 +962,8 @@ class UiWebsocket(object): else: self.response(to, {"error": "Unknown site: %s" % address}) + @flag.admin + @flag.no_multiuser def actionSiteDelete(self, to, address): site = self.server.sites.get(address) if site: @@ -939,15 +975,17 @@ class UiWebsocket(object): else: 
self.response(to, {"error": "Unknown site: %s" % address}) - def cbSiteClone(self, to, address, root_inner_path="", target_address=None): + def cbSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): self.cmd("notification", ["info", _["Cloning site..."]]) site = self.server.sites.get(address) + response = {} if target_address: target_site = self.server.sites.get(target_address) privatekey = self.user.getSiteData(target_site.address).get("privatekey") site.clone(target_address, privatekey, root_inner_path=root_inner_path) self.cmd("notification", ["done", _["Site source code upgraded!"]]) site.publish() + response = {"address": target_address} else: # Generate a new site from user's bip32 seed new_address, new_address_index, new_site_data = self.user.getNewSiteData() @@ -955,11 +993,15 @@ class UiWebsocket(object): new_site.settings["own"] = True new_site.saveSettings() self.cmd("notification", ["done", _["Site cloned"]]) - self.cmd("redirect", "/%s" % new_address) + if redirect: + self.cmd("redirect", "/%s" % new_address) gevent.spawn(new_site.announce) + response = {"address": new_address} + self.response(to, response) return "ok" - def actionSiteClone(self, to, address, root_inner_path="", target_address=None): + @flag.no_multiuser + def actionSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): if not SiteManager.site_manager.isAddress(address): self.response(to, {"error": "Not a site: %s" % address}) return @@ -970,21 +1012,23 @@ class UiWebsocket(object): site = self.server.sites.get(address) if site.bad_files: - for bad_inner_path in site.bad_files.keys(): + for bad_inner_path in list(site.bad_files.keys()): is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path) - if not is_user_file: + if not is_user_file and bad_inner_path != "content.json": self.cmd("notification", ["error", _["Clone error: Site still in sync"]]) return {"error": "Site still in sync"} if "ADMIN" in self.getPermissions(to): - self.cbSiteClone(to, address, root_inner_path, target_address) + self.cbSiteClone(to, address, root_inner_path, target_address, redirect) else: self.cmd( "confirm", [_["Clone site %s?"] % address, _["Clone"]], - lambda (res): self.cbSiteClone(to, address, root_inner_path, target_address) + lambda res: self.cbSiteClone(to, address, root_inner_path, target_address, redirect) ) + @flag.admin + @flag.no_multiuser def actionSiteSetLimit(self, to, size_limit): self.site.settings["size_limit"] = int(size_limit) self.site.saveSettings() @@ -992,6 +1036,7 @@ class UiWebsocket(object): self.site.updateWebsocket() self.site.download(blind_includes=True) + @flag.admin def actionSiteAdd(self, to, address): site_manager = SiteManager.site_manager if address in site_manager.sites: @@ -1002,18 +1047,25 @@ class UiWebsocket(object): else: return {"error": "Invalid address"} + @flag.async_run def actionSiteListModifiedFiles(self, to, content_inner_path="content.json"): - content = self.site.content_manager.contents[content_inner_path] + content = self.site.content_manager.contents.get(content_inner_path) + if not content: + return {"error": "content file not avaliable"} + min_mtime = content.get("modified", 0) site_path = self.site.storage.directory modified_files = [] # Load cache if not signed since last modified check - if content.get("modified", 0) < self.site.settings["cache"].get("time_modified_files_check"): + if content.get("modified", 0) < self.site.settings["cache"].get("time_modified_files_check", 0): min_mtime 
= self.site.settings["cache"].get("time_modified_files_check") modified_files = self.site.settings["cache"].get("modified_files", []) - inner_paths = [content_inner_path] + content.get("includes", {}).keys() + content.get("files", {}).keys() + inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys()) + + if len(inner_paths) > 100: + return {"error": "Too many files in content.json"} for relative_inner_path in inner_paths: inner_path = helper.getDirname(content_inner_path) + relative_inner_path @@ -1052,7 +1104,7 @@ class UiWebsocket(object): self.site.settings["cache"]["modified_files"] = modified_files return {"modified_files": modified_files} - + @flag.admin def actionSiteSetSettingsValue(self, to, key, value): if key not in ["modified_files_notification"]: return {"error": "Can't change this key"} @@ -1073,29 +1125,74 @@ class UiWebsocket(object): settings = self.user.settings self.response(to, settings) + @flag.admin def actionUserSetGlobalSettings(self, to, settings): self.user.settings = settings self.user.save() self.response(to, "ok") - def actionServerUpdate(self, to): - self.cmd("updating") - sys.modules["main"].update_after_shutdown = True - SiteManager.site_manager.save() - sys.modules["main"].file_server.stop() - sys.modules["main"].ui_server.stop() + @flag.admin + @flag.no_multiuser + def actionServerErrors(self, to): + return config.error_logger.lines + @flag.admin + @flag.no_multiuser + def actionServerUpdate(self, to): + def cbServerUpdate(res): + self.response(to, res) + if not res: + return False + for websocket in self.server.websockets: + websocket.cmd( + "notification", + ["info", _["Updating ZeroNet client, will be back in a few minutes..."], 20000] + ) + websocket.cmd("updating") + + import main + main.update_after_shutdown = True + main.restart_after_shutdown = True + SiteManager.site_manager.save() + main.file_server.stop() + main.ui_server.stop() + + self.cmd( + "confirm", + [_["Update ZeroNet client to latest version?"], _["Update"]], + cbServerUpdate + ) + + @flag.admin + @flag.async_run + @flag.no_multiuser def actionServerPortcheck(self, to): - file_server = sys.modules["main"].file_server + import main + file_server = main.file_server file_server.portCheck() self.response(to, file_server.port_opened) + @flag.admin + @flag.no_multiuser def actionServerShutdown(self, to, restart=False): - if restart: - sys.modules["main"].restart_after_shutdown = True - sys.modules["main"].file_server.stop() - sys.modules["main"].ui_server.stop() + import main + def cbServerShutdown(res): + self.response(to, res) + if not res: + return False + if restart: + main.restart_after_shutdown = True + main.file_server.stop() + main.ui_server.stop() + if restart: + message = [_["Restart ZeroNet client?"], _["Restart"]] + else: + message = [_["Shut down ZeroNet client?"], _["Shut down"]] + self.cmd("confirm", message, cbServerShutdown) + + @flag.admin + @flag.no_multiuser def actionServerShowdirectory(self, to, directory="backup", inner_path=""): if self.request.env["REMOTE_ADDR"] != "127.0.0.1": return self.response(to, {"error": "Only clients from 127.0.0.1 allowed to run this command"}) @@ -1115,9 +1212,14 @@ class UiWebsocket(object): else: return self.response(to, {"error": "Not a directory"}) + @flag.admin + @flag.no_multiuser def actionConfigSet(self, to, key, value): + import main + + self.log.debug("Changing config %s value to %r" % (key, value)) if key not in config.keys_api_change_allowed: - self.response(to, 
{"error": "Forbidden you cannot set this config key"}) + self.response(to, {"error": "Forbidden: You cannot set this config key"}) return if key == "open_browser": @@ -1155,7 +1257,7 @@ class UiWebsocket(object): value = False else: value = True - tor_manager = sys.modules["main"].file_server.tor_manager + tor_manager = main.file_server.tor_manager tor_manager.request("SETCONF UseBridges=%i" % value) if key == "trackers_file": @@ -1165,6 +1267,12 @@ class UiWebsocket(object): logging.getLogger('').setLevel(logging.getLevelName(config.log_level)) if key == "ip_external": - gevent.spawn(sys.modules["main"].file_server.portCheck) + gevent.spawn(main.file_server.portCheck) + + if key == "offline": + if value: + main.file_server.closeConnections() + else: + gevent.spawn(main.file_server.checkSites, check_files=False, force_port_check=True) self.response(to, "ok") diff --git a/src/Ui/__init__.py b/src/Ui/__init__.py index 9982dc4f..dcb8896d 100644 --- a/src/Ui/__init__.py +++ b/src/Ui/__init__.py @@ -1,3 +1,3 @@ -from UiServer import UiServer -from UiRequest import UiRequest -from UiWebsocket import UiWebsocket \ No newline at end of file +from .UiServer import UiServer +from .UiRequest import UiRequest +from .UiWebsocket import UiWebsocket \ No newline at end of file diff --git a/src/Ui/media/Infopanel.coffee b/src/Ui/media/Infopanel.coffee index eb17eae7..3a490364 100644 --- a/src/Ui/media/Infopanel.coffee +++ b/src/Ui/media/Infopanel.coffee @@ -3,15 +3,22 @@ class Infopanel @visible = false show: (closed=false) => - @elem.addClass("visible") + @elem.parent().addClass("visible") if closed @close() else @open() + unfold: => + @elem.toggleClass("unfolded") + return false + updateEvents: => @elem.off("click") @elem.find(".close").off("click") + @elem.find(".line").off("click") + + @elem.find(".line").on("click", @unfold) if @elem.hasClass("closed") @elem.on "click", => @@ -23,7 +30,7 @@ class Infopanel @close() hide: => - @elem.removeClass("visible") + @elem.parent().removeClass("visible") close: => @elem.addClass("closed") diff --git a/src/Ui/media/Loading.coffee b/src/Ui/media/Loading.coffee index 7cd2479d..8e35ce66 100644 --- a/src/Ui/media/Loading.coffee +++ b/src/Ui/media/Loading.coffee @@ -2,15 +2,18 @@ class Loading constructor: (@wrapper) -> if window.show_loadingscreen then @showScreen() @timer_hide = null + @timer_set = null setProgress: (percent) -> if @timer_hide clearInterval @timer_hide - RateLimit 200, -> + @timer_set = RateLimit 500, -> $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block") hideProgress: -> - console.log "hideProgress" + @log "hideProgress" + if @timer_set + clearInterval @timer_set @timer_hide = setTimeout ( => $(".progressbar").css("transform": "scaleX(1)").css("opacity", "0").hideLater(1000) ), 300 @@ -23,6 +26,7 @@ class Loading showTooLarge: (site_info) -> + @log "Displaying large site confirmation" if $(".console .button-setlimit").length == 0 # Not displaying it yet line = @printLine("Site size: #{parseInt(site_info.settings.size/1024/1024)}MB is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning") button = $("" + "Open site and set size limit to #{site_info.next_size_limit}MB" + "") @@ -52,7 +56,7 @@ class Loading # We dont need loadingscreen anymore hideScreen: -> - console.log "hideScreen" + @log "hideScreen" if not $(".loadingscreen").hasClass("done") # Only if its not animating already if @screen_visible # Hide with animate 
$(".loadingscreen").addClass("done").removeLater(2000) @@ -80,6 +84,8 @@ class Loading if type == "warning" then line.addClass("console-warning") return line + log: (args...) -> + console.log "[Loading]", args... window.Loading = Loading diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee index 393d5a44..35d949f3 100644 --- a/src/Ui/media/Notifications.coffee +++ b/src/Ui/media/Notifications.coffee @@ -37,7 +37,7 @@ class Notifications $(".notification-icon", elem).html("i") if typeof(body) == "string" - $(".body", elem).html(""+body+"") + $(".body", elem).html("
    "+body+"
    ") else $(".body", elem).html("").append(body) @@ -51,13 +51,13 @@ class Notifications ), timeout # Animate - width = elem.outerWidth() + width = Math.min(elem.outerWidth() + 70, 580) if not timeout then width += 20 # Add space for close button if elem.outerHeight() > 55 then elem.addClass("long") elem.css({"width": "50px", "transform": "scale(0.01)"}) elem.animate({"scale": 1}, 800, "easeOutElastic") elem.animate({"width": width}, 700, "easeInOutCubic") - $(".body", elem).css("width": (width - 80)) + $(".body", elem).css("width": (width - 50)) $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000) # Close button or Confirm button diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee index 5d1f2d7d..1b98855e 100644 --- a/src/Ui/media/Wrapper.coffee +++ b/src/Ui/media/Wrapper.coffee @@ -33,6 +33,8 @@ class Wrapper @address = null @opener_tested = false @announcer_line = null + @web_notifications = {} + @is_title_changed = false @allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors @@ -62,6 +64,9 @@ class Wrapper # Incoming message from UiServer websocket onMessageWebsocket: (e) => message = JSON.parse(e.data) + @handleMessageWebsocket(message) + + handleMessageWebsocket: (message) => cmd = message.cmd if cmd == "response" if @ws.waiting_cb[message.to]? # We are waiting for response @@ -95,6 +100,7 @@ class Wrapper else if cmd == "error" @notifications.add("notification-#{message.id}", "error", message.params, 0) else if cmd == "updating" # Close connection + @log "Updating: Closing websocket" @ws.ws.close() @ws.onCloseWebsocket(null, 4000) else if cmd == "redirect" @@ -166,7 +172,9 @@ class Wrapper else if cmd == "wrapperSetViewport" # Set the viewport @actionSetViewport(message) else if cmd == "wrapperSetTitle" + @log "wrapperSetTitle", message.params $("head title").text(message.params) + @is_title_changed = true else if cmd == "wrapperReload" # Reload current page @actionReload(message) else if cmd == "wrapperGetLocalStorage" @@ -189,6 +197,10 @@ class Wrapper @actionPermissionAdd(message) else if cmd == "wrapperRequestFullscreen" @actionRequestFullscreen() + else if cmd == "wrapperWebNotification" + @actionWebNotification(message) + else if cmd == "wrapperCloseWebNotification" + @actionCloseWebNotification(message) else # Send to websocket if message.id < 1000000 if message.cmd == "fileWrite" and not @modified_panel_updater_timer and site_info?.settings?.own @@ -235,6 +247,40 @@ class Wrapper request_fullscreen = elem.requestFullScreen || elem.webkitRequestFullscreen || elem.mozRequestFullScreen || elem.msRequestFullScreen request_fullscreen.call(elem) + actionWebNotification: (message) -> + $.when(@event_site_info).done => + # Check that the wrapper may send notifications + if Notification.permission == "granted" + @displayWebNotification message + else if Notification.permission == "denied" + res = {"error": "Web notifications are disabled by the user"} + @sendInner {"cmd": "response", "to": message.id, "result": res} + else + Notification.requestPermission().then (permission) => + if permission == "granted" + @displayWebNotification message + + actionCloseWebNotification: (message) -> + $.when(@event_site_info).done => + id = message.params[0] + @web_notifications[id].close() + + displayWebNotification: (message) -> + title = message.params[0] + id = message.params[1] + options = message.params[2] + notification = new Notification(title, options) + @web_notifications[id] = 
notification + notification.onshow = () => + @sendInner {"cmd": "response", "to": message.id, "result": "ok"} + notification.onclick = (e) => + if not options.focus_tab + e.preventDefault() + @sendInner {"cmd": "webNotificationClick", "params": {"id": id}} + notification.onclose = () => + @sendInner {"cmd": "webNotificationClose", "params": {"id": id}} + delete @web_notifications[id] + actionPermissionAdd: (message) -> permission = message.params $.when(@event_site_info).done => @@ -315,9 +361,8 @@ class Wrapper @displayPrompt message.params[0], type, caption, placeholder, (res) => @sendInner {"cmd": "response", "to": message.id, "result": res} # Response to confirm - actionProgress: (message) -> - message.params = @toHtmlSafe(message.params) # Escape html - percent = Math.min(100, message.params[2])/100 + displayProgress: (type, body, percent) -> + percent = Math.min(100, percent)/100 offset = 75-(percent*75) circle = """
    @@ -325,22 +370,22 @@ class Wrapper
    """ - body = ""+message.params[1]+"" + circle - elem = $(".notification-#{message.params[0]}") + body = ""+body+"" + circle + elem = $(".notification-#{type}") if elem.length width = $(".body .message", elem).outerWidth() - $(".body .message", elem).html(message.params[1]) + $(".body .message", elem).html(body) if $(".body .message", elem).css("width") == "" $(".body .message", elem).css("width", width) $(".body .circle-fg", elem).css("stroke-dashoffset", offset) else - elem = @notifications.add(message.params[0], "progress", $(body)) + elem = @notifications.add(type, "progress", $(body)) if percent > 0 $(".body .circle-bg", elem).css {"animation-play-state": "paused", "stroke-dasharray": "180px"} if $(".notification-icon", elem).data("done") return false - else if message.params[2] >= 100 # Done + else if percent >= 1 # Done $(".circle-fg", elem).css("transition", "all 0.3s ease-in-out") setTimeout (-> $(".notification-icon", elem).css {transform: "scale(1)", opacity: 1} @@ -350,7 +395,7 @@ class Wrapper @notifications.close elem ), 3000 $(".notification-icon", elem).data("done", true) - else if message.params[2] < 0 # Error + else if percent < 0 # Error $(".body .circle-fg", elem).css("stroke", "#ec6f47").css("transition", "transition: all 0.3s ease-in-out") setTimeout (=> $(".notification-icon", elem).css {transform: "scale(1)", opacity: 1} @@ -360,6 +405,10 @@ class Wrapper $(".notification-icon", elem).data("done", true) + actionProgress: (message) -> + message.params = @toHtmlSafe(message.params) # Escape html + @displayProgress(message.params[0], message.params[1], message.params[2]) + actionSetViewport: (message) -> @log "actionSetViewport", message if $("#viewport").length > 0 @@ -371,11 +420,13 @@ class Wrapper @reload(message.params[0]) reload: (url_post="") -> + @log "Reload" + current_url = window.location.toString().replace(/#.*/g, "") if url_post - if window.location.toString().indexOf("?") > 0 - window.location += "&"+url_post + if current_url.indexOf("?") > 0 + window.location = current_url + "&" + url_post else - window.location += "?"+url_post + window.location = current_url + "?" + url_post else window.location.reload() @@ -445,13 +496,14 @@ class Wrapper # Iframe loaded onPageLoad: (e) => + @log "onPageLoad" @inner_loaded = true if not @inner_ready then @sendInner {"cmd": "wrapperReady"} # Inner frame loaded before wrapper #if not @site_error then @loading.hideScreen() # Hide loading screen if @ws.ws.readyState == 1 and not @site_info # Ws opened @reloadSiteInfo() - else if @site_info and @site_info.content?.title? - window.document.title = @site_info.content.title+" - ZeroNet" + else if @site_info and @site_info.content?.title? 
and not @is_title_changed + window.document.title = @site_info.content.title + " - ZeroNet" @log "Setting title to", window.document.title onWrapperLoad: => @@ -479,16 +531,13 @@ class Wrapper @address = site_info.address @setSiteInfo site_info - if site_info.settings.size > site_info.size_limit*1024*1024 # Site size too large and not displaying it yet - if @loading.screen_visible - @loading.showTooLarge(site_info) - else - @displayConfirm "Site is larger than allowed: #{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB", "Set limit to #{site_info.next_size_limit}MB", => - @ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) => - if res == "ok" - @notifications.add("size_limit", "done", "Site storage limit modified!", 5000) + if site_info.settings.size > site_info.size_limit * 1024 * 1024 and not @loading.screen_visible # Site size too large and not displaying it yet + @displayConfirm "Site is larger than allowed: #{(site_info.settings.size/1024/1024).toFixed(1)}MB/#{site_info.size_limit}MB", "Set limit to #{site_info.next_size_limit}MB", => + @ws.cmd "siteSetLimit", [site_info.next_size_limit], (res) => + if res == "ok" + @notifications.add("size_limit", "done", "Site storage limit modified!", 5000) - if site_info.content?.title? + if site_info.content?.title? and not @is_title_changed window.document.title = site_info.content.title + " - ZeroNet" @log "Setting title to", window.document.title @@ -505,9 +554,9 @@ class Wrapper if site_info.event[1] == window.file_inner_path # File downloaded we currently on @loading.hideScreen() if not @site_info then @reloadSiteInfo() - if site_info.content - window.document.title = site_info.content.title+" - ZeroNet" - @log "Required file done, setting title to", window.document.title + if site_info.content and not @is_title_changed + window.document.title = site_info.content.title + " - ZeroNet" + @log "Required file #{window.file_inner_path} done, setting title to", window.document.title if not window.show_loadingscreen @notifications.add("modified", "info", "New version of this page has just released.
    Reload to see the modified content.") # File failed downloading @@ -537,12 +586,17 @@ class Wrapper @notifications.add("size_limit", "done", "Site storage limit modified!", 5000) return false - if @loading.screen_visible and @inner_loaded and site_info.settings.size < site_info.size_limit*1024*1024 and site_info.settings.size > 0 # Loading screen still visible, but inner loaded + if @loading.screen_visible and @inner_loaded and site_info.settings.size < site_info.size_limit * 1024 * 1024 and site_info.settings.size > 0 # Loading screen still visible, but inner loaded + @log "Loading screen visible, but inner loaded" @loading.hideScreen() if site_info?.settings?.own and site_info?.settings?.modified != @site_info?.settings?.modified @updateModifiedPanel() + if @loading.screen_visible and site_info.settings.size > site_info.size_limit * 1024 * 1024 + @log "Site too large" + @loading.showTooLarge(site_info) + @site_info = site_info @event_site_info.resolve() @@ -572,7 +626,7 @@ class Wrapper updateModifiedPanel: => @ws.cmd "siteListModifiedFiles", [], (res) => - num = res.modified_files.length + num = res.modified_files?.length if num > 0 closed = @site_info.settings.modified_files_notification == false @infopanel.show(closed) @@ -591,8 +645,7 @@ class Wrapper @notifications.add "sign", "done", "content.json Signed!", 5000 @sitePublish("content.json") return false - - @log "siteListModifiedFiles", res + @log "siteListModifiedFiles", num, res setAnnouncerInfo: (announcer_info) -> status_db = {announcing: [], error: [], announced: []} @@ -605,7 +658,7 @@ class Wrapper else @announcer_line = @loading.printLine(status_line) - if status_db.error.length > (status_db.announced.length + status_db.announcing.length) + if status_db.error.length > (status_db.announced.length + status_db.announcing.length) and status_db.announced.length < 3 @loading.showTrackerTorBridge(@server_info) updateProgress: (site_info) -> @@ -628,11 +681,13 @@ class Wrapper setSizeLimit: (size_limit, reload=true) => + @log "setSizeLimit: #{size_limit}, reload: #{reload}" + @inner_loaded = false # Inner frame not loaded, just a 404 page displayed @ws.cmd "siteSetLimit", [size_limit], (res) => if res != "ok" return false @loading.printLine res - @inner_loaded = false # Inner frame not loaded, just a 404 page displayed + @inner_loaded = false if reload then @reloadIframe() return false @@ -653,7 +708,7 @@ if origin.indexOf("https:") == 0 else proto = { ws: 'ws', http: 'http' } -ws_url = proto.ws + ":" + origin.replace(proto.http+":", "") + "/Websocket?wrapper_key=" + window.wrapper_key +ws_url = proto.ws + ":" + origin.replace(proto.http+":", "") + "/ZeroNet-Internal/Websocket?wrapper_key=" + window.wrapper_key window.wrapper = new Wrapper(ws_url) diff --git a/src/Ui/media/Wrapper.css b/src/Ui/media/Wrapper.css index 22df41e2..67e35a84 100644 --- a/src/Ui/media/Wrapper.css +++ b/src/Ui/media/Wrapper.css @@ -8,7 +8,10 @@ a { color: black } #inner-iframe { width: 100%; height: 100%; position: absolute; border: 0; } /*; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out*/ #inner-iframe.back { transform: scale(0.95) translate(-300px, 0); opacity: 0.4 } -.button { padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; border-radius: 2px; text-decoration: none; transition: all 0.5s; background-position: left center; } +.button { + padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; + border-radius: 2px; 
text-decoration: none; transition: all 0.5s; background-position: left center; white-space: nowrap; +} .button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; transition: none } .button:active { position: relative; top: 1px } .button:focus { outline: none } @@ -44,34 +47,39 @@ a { color: black } .notifications { position: absolute; top: 0; right: 80px; display: inline-block; z-index: 999; white-space: nowrap } .notification { - position: relative; float: right; clear: both; margin: 10px; box-sizing: border-box; overflow: hidden; backface-visibility: hidden; perspective: 1000px; padding-bottom: 5px; - color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/ + position: relative; float: right; clear: both; margin: 10px; box-sizing: border-box; overflow: hidden; backface-visibility: hidden; + perspective: 1000px; padding-bottom: 5px; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; + font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/ } .notification-icon { display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 2; text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white; } .notification .body { - padding-left: 14px; padding-right: 60px; height: 40px; vertical-align: middle; display: table; + border-right: 40px solid transparent; padding-left: 14px; padding-right: 60px; height: 50px; vertical-align: middle; display: table; padding-right: 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; background-color: white; left: 50px; top: 0; position: relative; padding-top: 5px; padding-bottom: 5px; } .notification .message-outer { display: table-row } -.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; } +.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; padding-right: 20px; text-align: right; } .notification.long .body { padding-top: 10px; padding-bottom: 10px } -.notification .message { display: table-cell; vertical-align: middle; } +.notification .message { display: table-cell; vertical-align: middle; max-width: 500px; white-space: normal; } .notification.visible { max-width: 350px } -.notification .close { position: absolute; top: 0; right: 0; font-size: 19px; line-height: 13px; color: #DDD; padding: 7px; text-decoration: none } -.notification .close:hover { color: black } -.notification .close:active, .notification .close:focus { color: #AF3BFF } +.notification .close:hover { opacity: 0.8 } +.notification .close { + position: absolute; top: 0; right: 0; text-decoration: none; margin: 10px; padding: 0px; display: block; width: 30px; height: 30px; + text-align: center; background-color: tomato; line-height: 30px; vertical-align: bottom; font-size: 30px; color: white; +} + .notification small { color: #AAA } +.notification .multiline { white-space: normal; word-break: break-word; max-width: 300px; } .body-white .notification { box-shadow: 0 1px 9px rgba(0,0,0,0.1) } /* Notification select */ .notification .select { display: block; padding: 10px; margin-right: -32px; text-decoration: none; border-left: 3px solid #EEE; - margin-top: 1px; transition: all 0.3s; color: #666 + margin-top: 10px; transition: all 0.3s; color: #666 } .notification 
.select:hover, .notification .select.active { background-color: #007AFF; border-left: 3px solid #5D68FF; color: white; transition: none } .notification .select:active, .notification .select:focus { background-color: #3396FF; color: white; transition: none; border-left-color: #3396FF } @@ -105,12 +113,14 @@ a { color: black } /* Infopanel */ +.infopanel-container { width: 100%; height: 100%; overflow: hidden; position: absolute; display: none; } +.infopanel-container.visible { display: block; } .infopanel { - position: absolute; z-index: 999; padding: 15px 15px; bottom: 55px; right: 50px; border: 1px solid #eff3fe; display: none; - font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); background-color: white; - border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); + position: absolute; z-index: 999; padding: 15px 15px; bottom: 25px; right: 50px; border: 1px solid #eff3fe; + font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); + background-color: white; border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; + transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); } -.infopanel.visible { display: block; } .infopanel.closed { box-shadow: none; transform: translateX(100%); right: 0px; cursor: pointer; } .infopanel .message { font-size: 13px; line-height: 15px; display: inline-block; vertical-align: -9px; } .infopanel .message .line { max-width: 200px; display: inline-block; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; } @@ -120,16 +130,17 @@ a { color: black } .infopanel .close:active, .infopanel .close:focus { color: #AF3BFF } .infopanel.closed .closed-num { opacity: 1; margin-left: -36px; pointer-events: inherit; } .infopanel .closed-num { - position: absolute; margin-top: 6px; background-color: #9a61f8; color: white; width: 10px; text-align: center; + position: absolute; margin-top: 6px; background-color: #6666663d; color: white; width: 10px; text-align: center; padding: 4px; border-top-left-radius: 6px; border-bottom-left-radius: 6px; font-size: 10px; opacity: 0; margin-left: 0px; pointer-events: none; transition: all 0.6s; } +.infopanel.unfolded .message .line { overflow: visible; white-space: normal; } .body-sidebar .infopanel { right: 425px; } .body-sidebar .infopanel.closed { right: 0px; } /* Loading screen */ -.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: hidden; display: none } +.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: auto; display: none } .theme-dark .loadingscreen { background-color: #180922; } .loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% } .loading-config { @@ -141,16 +152,16 @@ a { color: black } .loadingscreen.ready .loading-config { top: 0px; } -/* Console */ -.console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; transform: translateY(-20px); } -.console-line:last-child { color: #6C6767 } -.console .cursor { +/* Loading console */ +.loadingscreen .console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; transform: translateY(-20px); } 
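
For the new wrapperWebNotification / wrapperCloseWebNotification commands added to Wrapper.coffee above: the wrapper expects params of [title, id, options] and [id] respectively, replies "ok" once the notification is shown (or an error object when permission is denied), and pushes webNotificationClick / webNotificationClose messages carrying the id back to the iframe. A hedged site-side sketch using the standard zeroframe client; the id and option values are made up for illustration:

  notification_id = "note-1"  # illustrative id, chosen by the site

  # Ask the wrapper to show a browser-level notification: params are [title, id, options]
  zeroframe.cmd "wrapperWebNotification", ["New message", notification_id, {body: "You have mail", focus_tab: true}], (res) ->
    console.log "Notification result:", res  # "ok" once shown, or {error: ...} if denied

  # The wrapper later pushes {cmd: "webNotificationClick"|"webNotificationClose", params: {id: ...}}
  # into the iframe; handle those in the site's ZeroFrame request hook.

  # Dismiss the notification programmatically: params are [id]
  closeNotification = ->
    zeroframe.cmd "wrapperCloseWebNotification", [notification_id]
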
+.loadingscreen .console-line:last-child { color: #6C6767 } +.loadingscreen .console .cursor { background-color: #999; color: #999; animation: pulse 1.5s infinite ease-in-out; margin-right: -9px; display: inline-block; width: 9px; height: 19px; vertical-align: -4px; } -.console .console-error { color: #e74c3c; font-weight: bold; animation: pulse 2s infinite linear } -.console .console-warning { color: #8e44ad; } -.console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px } +.loadingscreen .console .console-error { color: #e74c3c; font-weight: bold; animation: pulse 2s infinite linear } +.loadingscreen .console .console-warning { color: #8e44ad; } +.loadingscreen .console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px } /* Flipper loading anim */ @@ -175,7 +186,7 @@ a { color: black } .progressbar { background: #26C281; position: fixed; width: 100%; z-index: 100; top: 0; left: 0; transform: scaleX(0); transform-origin: 0% 0%; transform:translate3d(0,0,0); - height: 2px; transition: transform 0.5s, opacity 1s; display: none; backface-visibility: hidden; transform-style: preserve-3d; + height: 2px; transition: transform 1s, opacity 1s; display: none; backface-visibility: hidden; transform-style: preserve-3d; } .progressbar .peg { display: block; position: absolute; right: 0; width: 100px; height: 100%; @@ -215,8 +226,8 @@ a { color: black } /* Small screen */ @media screen and (max-width: 600px) { .notification .message { white-space: normal; } - .notification .buttons { padding-right: 22px; } + .notification .buttons { padding-right: 22px; padding-right: 40px; } .notification .button { white-space: nowrap; } - .notification { margin: 0px } - .notifications { right: 0px } + .notification { margin: 0px; } + .notifications { right: 0px; max-width: 80%; } } diff --git a/src/Ui/media/ZeroSiteTheme.coffee b/src/Ui/media/ZeroSiteTheme.coffee new file mode 100644 index 00000000..79adb671 --- /dev/null +++ b/src/Ui/media/ZeroSiteTheme.coffee @@ -0,0 +1,49 @@ +DARK = "(prefers-color-scheme: dark)" +LIGHT = "(prefers-color-scheme: light)" + +mqDark = window.matchMedia(DARK) +mqLight = window.matchMedia(LIGHT) + + +changeColorScheme = (theme) -> + zeroframe.cmd "userGetGlobalSettings", [], (user_settings) -> + if user_settings.theme != theme + user_settings.theme = theme + zeroframe.cmd "userSetGlobalSettings", [user_settings], (status) -> + if status == "ok" + location.reload() + return + return + return + + +displayNotification = ({matches, media}) -> + if !matches + return + + zeroframe.cmd "siteInfo", [], (site_info) -> + if "ADMIN" in site_info.settings.permissions + zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.
    Please reload site to use it."] + else + zeroframe.cmd "wrapperNotification", ["info", "Your system's theme has been changed.
    Please open ZeroHello to use it."] + return + return + + +detectColorScheme = -> + if mqDark.matches + changeColorScheme("dark") + else if mqLight.matches + changeColorScheme("light") + + mqDark.addListener(displayNotification) + mqLight.addListener(displayNotification) + + return + + +zeroframe.cmd "userGetGlobalSettings", [], (user_settings) -> + if user_settings.use_system_theme == true + detectColorScheme() + + return diff --git a/src/Ui/media/all.css b/src/Ui/media/all.css index 9964349b..bd54cf34 100644 --- a/src/Ui/media/all.css +++ b/src/Ui/media/all.css @@ -1,6 +1,5 @@ - -/* ---- src/Ui/media/Wrapper.css ---- */ +/* ---- Wrapper.css ---- */ body { margin: 0; padding: 0; height: 100%; background-color: #D2CECD; overflow: hidden } @@ -13,7 +12,10 @@ a { color: black } #inner-iframe { width: 100%; height: 100%; position: absolute; border: 0; } /*; transition: all 0.8s cubic-bezier(0.68, -0.55, 0.265, 1.55), opacity 0.8s ease-in-out*/ #inner-iframe.back { -webkit-transform: scale(0.95) translate(-300px, 0); -moz-transform: scale(0.95) translate(-300px, 0); -o-transform: scale(0.95) translate(-300px, 0); -ms-transform: scale(0.95) translate(-300px, 0); transform: scale(0.95) translate(-300px, 0) ; opacity: 0.4 } -.button { padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; text-decoration: none; -webkit-transition: all 0.5s; -moz-transition: all 0.5s; -o-transition: all 0.5s; -ms-transition: all 0.5s; transition: all 0.5s ; background-position: left center; } +.button { + padding: 5px 10px; margin-left: 10px; background-color: #FFF85F; border-bottom: 2px solid #CDBD1E; + -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; text-decoration: none; -webkit-transition: all 0.5s; -moz-transition: all 0.5s; -o-transition: all 0.5s; -ms-transition: all 0.5s; transition: all 0.5s ; background-position: left center; white-space: nowrap; +} .button:hover { background-color: #FFF400; border-bottom: 2px solid #4D4D4C; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none } .button:active { position: relative; top: 1px } .button:focus { outline: none } @@ -49,21 +51,22 @@ a { color: black } .notifications { position: absolute; top: 0; right: 80px; display: inline-block; z-index: 999; white-space: nowrap } .notification { - position: relative; float: right; clear: both; margin: 10px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; overflow: hidden; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; -webkit-perspective: 1000px; -moz-perspective: 1000px; -o-perspective: 1000px; -ms-perspective: 1000px; perspective: 1000px ; padding-bottom: 5px; - color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/ + position: relative; float: right; clear: both; margin: 10px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; overflow: hidden; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; 
-o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; + -webkit-perspective: 1000px; -moz-perspective: 1000px; -o-perspective: 1000px; -ms-perspective: 1000px; perspective: 1000px ; padding-bottom: 5px; color: #4F4F4F; font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; + font-size: 14px; line-height: 20px; /*border: 1px solid rgba(210, 206, 205, 0.2)*/ } .notification-icon { display: block; width: 50px; height: 50px; position: absolute; float: left; z-index: 2; text-align: center; background-color: #e74c3c; line-height: 45px; vertical-align: bottom; font-size: 40px; color: white; } .notification .body { - padding-left: 14px; padding-right: 60px; height: 40px; vertical-align: middle; display: table; + padding-left: 14px; padding-right: 60px; height: 50px; vertical-align: middle; display: table; padding-right: 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; background-color: white; left: 50px; top: 0; position: relative; padding-top: 5px; padding-bottom: 5px; } .notification .message-outer { display: table-row } -.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; } +.notification .buttons { display: table-cell; vertical-align: top; padding-top: 9px; padding-right: 20px; text-align: right; } .notification.long .body { padding-top: 10px; padding-bottom: 10px } -.notification .message { display: table-cell; vertical-align: middle; } +.notification .message { display: table-cell; vertical-align: middle; max-width: 500px; white-space: normal; } .notification.visible { max-width: 350px } @@ -71,6 +74,7 @@ a { color: black } .notification .close:hover { color: black } .notification .close:active, .notification .close:focus { color: #AF3BFF } .notification small { color: #AAA } +.notification .multiline { white-space: normal; word-break: break-word; max-width: 300px; } .body-white .notification { -webkit-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; -moz-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; -o-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; -ms-box-shadow: 0 1px 9px rgba(0,0,0,0.1) ; box-shadow: 0 1px 9px rgba(0,0,0,0.1) } /* Notification select */ @@ -119,12 +123,14 @@ a { color: black } /* Infopanel */ +.infopanel-container { width: 100%; height: 100%; overflow: hidden; position: absolute; display: none; } +.infopanel-container.visible { display: block; } .infopanel { - position: absolute; z-index: 999; padding: 15px 15px; bottom: 55px; right: 50px; border: 1px solid #eff3fe; display: none; - font-family: 'Lucida Grande', 'Segoe UI', Helvetica, Arial, sans-serif; -webkit-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -moz-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -o-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -ms-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17) ; background-color: white; - border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; -webkit-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -moz-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -o-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -ms-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1) ; + position: absolute; z-index: 999; padding: 15px 15px; bottom: 25px; right: 50px; border: 1px solid #eff3fe; + font-family: 'Lucida Grande', 'Segoe UI', 
Helvetica, Arial, sans-serif; -webkit-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -moz-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -o-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); -ms-box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17); box-shadow: 0px 10px 55px rgba(58, 39, 176, 0.17) ; + background-color: white; border-left: 4px solid #9a61f8; border-top-left-radius: 4px; border-bottom-left-radius: 4px; + -webkit-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -moz-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -o-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); -ms-transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1); transition: all 0.8s cubic-bezier(0.215, 0.61, 0.355, 1) ; } -.infopanel.visible { display: block; } .infopanel.closed { -webkit-box-shadow: none; -moz-box-shadow: none; -o-box-shadow: none; -ms-box-shadow: none; box-shadow: none ; -webkit-transform: translateX(100%); -moz-transform: translateX(100%); -o-transform: translateX(100%); -ms-transform: translateX(100%); transform: translateX(100%) ; right: 0px; cursor: pointer; } .infopanel .message { font-size: 13px; line-height: 15px; display: inline-block; vertical-align: -9px; } .infopanel .message .line { max-width: 200px; display: inline-block; white-space: nowrap; text-overflow: ellipsis; overflow: hidden; } @@ -134,16 +140,17 @@ a { color: black } .infopanel .close:active, .infopanel .close:focus { color: #AF3BFF } .infopanel.closed .closed-num { opacity: 1; margin-left: -36px; pointer-events: inherit; } .infopanel .closed-num { - position: absolute; margin-top: 6px; background-color: #9a61f8; color: white; width: 10px; text-align: center; + position: absolute; margin-top: 6px; background-color: #6666663d; color: white; width: 10px; text-align: center; padding: 4px; border-top-left-radius: 6px; border-bottom-left-radius: 6px; font-size: 10px; opacity: 0; margin-left: 0px; pointer-events: none; -webkit-transition: all 0.6s; -moz-transition: all 0.6s; -o-transition: all 0.6s; -ms-transition: all 0.6s; transition: all 0.6s ; } +.infopanel.unfolded .message .line { overflow: visible; white-space: normal; } .body-sidebar .infopanel { right: 425px; } .body-sidebar .infopanel.closed { right: 0px; } /* Loading screen */ -.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: hidden; display: none } +.loadingscreen { width: 100%; height: 100%; position: absolute; background-color: #EEE; z-index: 1; overflow: auto; display: none } .theme-dark .loadingscreen { background-color: #180922; } .loading-text { text-align: center; vertical-align: middle; top: 50%; position: absolute; margin-top: 39px; width: 100% } .loading-config { @@ -155,16 +162,16 @@ a { color: black } .loadingscreen.ready .loading-config { top: 0px; } -/* Console */ -.console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; -webkit-transform: translateY(-20px); -moz-transform: translateY(-20px); -o-transform: translateY(-20px); -ms-transform: translateY(-20px); transform: translateY(-20px) ; } -.console-line:last-child { color: #6C6767 } -.console .cursor { +/* Loading console */ +.loadingscreen .console { line-height: 24px; font-family: monospace; font-size: 14px; color: #ADADAD; text-transform: uppercase; opacity: 0; -webkit-transform: translateY(-20px); -moz-transform: translateY(-20px); -o-transform: translateY(-20px); -ms-transform: translateY(-20px); transform: translateY(-20px) ; } 
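
ZeroSiteTheme.coffee above only syncs the OS color-scheme preference into the user's global theme setting; a page still has to read that setting to style itself. A rough sketch of the reading half, reusing the theme-dark class name that Wrapper.css above applies to the wrapper body (toggling it on the site's own body here is purely illustrative):

  # Read the user's global theme setting and mirror it onto the page
  applyTheme = ->
    zeroframe.cmd "userGetGlobalSettings", [], (user_settings) ->
      is_dark = user_settings?.theme == "dark"
      document.body.classList.toggle("theme-dark", is_dark)

  applyTheme()
  # Re-apply when the OS color scheme flips (same listener style as ZeroSiteTheme.coffee)
  window.matchMedia("(prefers-color-scheme: dark)").addListener(applyTheme)
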
+.loadingscreen .console-line:last-child { color: #6C6767 } +.loadingscreen .console .cursor { background-color: #999; color: #999; -webkit-animation: pulse 1.5s infinite ease-in-out; -moz-animation: pulse 1.5s infinite ease-in-out; -o-animation: pulse 1.5s infinite ease-in-out; -ms-animation: pulse 1.5s infinite ease-in-out; animation: pulse 1.5s infinite ease-in-out ; margin-right: -9px; display: inline-block; width: 9px; height: 19px; vertical-align: -4px; } -.console .console-error { color: #e74c3c; font-weight: bold; -webkit-animation: pulse 2s infinite linear ; -moz-animation: pulse 2s infinite linear ; -o-animation: pulse 2s infinite linear ; -ms-animation: pulse 2s infinite linear ; animation: pulse 2s infinite linear } -.console .console-warning { color: #8e44ad; } -.console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px } +.loadingscreen .console .console-error { color: #e74c3c; font-weight: bold; -webkit-animation: pulse 2s infinite linear ; -moz-animation: pulse 2s infinite linear ; -o-animation: pulse 2s infinite linear ; -ms-animation: pulse 2s infinite linear ; animation: pulse 2s infinite linear } +.loadingscreen .console .console-warning { color: #8e44ad; } +.loadingscreen .console .button { margin: 20px; display: inline-block; text-transform: none; padding: 10px 20px } /* Flipper loading anim */ @@ -189,7 +196,7 @@ a { color: black } .progressbar { background: #26C281; position: fixed; width: 100%; z-index: 100; top: 0; left: 0; -webkit-transform: scaleX(0); -moz-transform: scaleX(0); -o-transform: scaleX(0); -ms-transform: scaleX(0); transform: scaleX(0) ; transform-origin: 0% 0%; transform:translate3d(0,0,0); - height: 2px; -webkit-transition: transform 0.5s, opacity 1s; -moz-transition: transform 0.5s, opacity 1s; -o-transition: transform 0.5s, opacity 1s; -ms-transition: transform 0.5s, opacity 1s; transition: transform 0.5s, opacity 1s ; display: none; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; transform-style: preserve-3d; + height: 2px; -webkit-transition: transform 1s, opacity 1s; -moz-transition: transform 1s, opacity 1s; -o-transition: transform 1s, opacity 1s; -ms-transition: transform 1s, opacity 1s; transition: transform 1s, opacity 1s ; display: none; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; transform-style: preserve-3d; } .progressbar .peg { display: block; position: absolute; right: 0; width: 100px; height: 100%; @@ -255,8 +262,8 @@ a { color: black } /* Small screen */ @media screen and (max-width: 600px) { .notification .message { white-space: normal; } - .notification .buttons { padding-right: 22px; } + .notification .buttons { padding-right: 22px; padding-right: 40px; } .notification .button { white-space: nowrap; } - .notification { margin: 0px } - .notifications { right: 0px } + .notification { margin: 0px; } + .notifications { right: 0px; max-width: 80%; } } diff --git a/src/Ui/media/all.js b/src/Ui/media/all.js index bf0bb8c9..f5ad947c 100644 --- a/src/Ui/media/all.js +++ b/src/Ui/media/all.js @@ -1,14 +1,12 @@ - -/* ---- src/Ui/media/lib/00-jquery.min.js ---- */ +/* ---- lib/00-jquery.min.js ---- */ /*! 
jQuery v3.3.1 | (c) JS Foundation and other contributors | jquery.org/license */ !function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(e,t){"use strict";var n=[],r=e.document,i=Object.getPrototypeOf,o=n.slice,a=n.concat,s=n.push,u=n.indexOf,l={},c=l.toString,f=l.hasOwnProperty,p=f.toString,d=p.call(Object),h={},g=function e(t){return"function"==typeof t&&"number"!=typeof t.nodeType},y=function e(t){return null!=t&&t===t.window},v={type:!0,src:!0,noModule:!0};function m(e,t,n){var i,o=(t=t||r).createElement("script");if(o.text=e,n)for(i in v)n[i]&&(o[i]=n[i]);t.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[c.call(e)]||"object":typeof e}var b="3.3.1",w=function(e,t){return new w.fn.init(e,t)},T=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;w.fn=w.prototype={jquery:"3.3.1",constructor:w,length:0,toArray:function(){return o.call(this)},get:function(e){return null==e?o.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=w.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return w.each(this,e)},map:function(e){return this.pushStack(w.map(this,function(t,n){return e.call(t,n,t)}))},slice:function(){return this.pushStack(o.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(n>=0&&n0&&t-1 in e)}var E=function(e){var t,n,r,i,o,a,s,u,l,c,f,p,d,h,g,y,v,m,x,b="sizzle"+1*new Date,w=e.document,T=0,C=0,E=ae(),k=ae(),S=ae(),D=function(e,t){return e===t&&(f=!0),0},N={}.hasOwnProperty,A=[],j=A.pop,q=A.push,L=A.push,H=A.slice,O=function(e,t){for(var n=0,r=e.length;n+~]|"+M+")"+M+"*"),z=new RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),X=new RegExp(W),U=new RegExp("^"+R+"$"),V={ID:new RegExp("^#("+R+")"),CLASS:new RegExp("^\\.("+R+")"),TAG:new RegExp("^("+R+"|[*])"),ATTR:new RegExp("^"+I),PSEUDO:new RegExp("^"+W),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+P+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},G=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Q=/^[^{]+\{\s*\[native \w/,J=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,K=/[+~]/,Z=new RegExp("\\\\([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ee=function(e,t,n){var r="0x"+t-65536;return r!==r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023&r|56320)},te=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ne=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},re=function(){p()},ie=me(function(e){return!0===e.disabled&&("form"in e||"label"in e)},{dir:"parentNode",next:"legend"});try{L.apply(A=H.call(w.childNodes),w.childNodes),A[w.childNodes.length].nodeType}catch(e){L={apply:A.length?function(e,t){q.apply(e,H.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function oe(e,t,r,i){var o,s,l,c,f,h,v,m=t&&t.ownerDocument,T=t?t.nodeType:9;if(r=r||[],"string"!=typeof e||!e||1!==T&&9!==T&&11!==T)return 
r;if(!i&&((t?t.ownerDocument||t:w)!==d&&p(t),t=t||d,g)){if(11!==T&&(f=J.exec(e)))if(o=f[1]){if(9===T){if(!(l=t.getElementById(o)))return r;if(l.id===o)return r.push(l),r}else if(m&&(l=m.getElementById(o))&&x(t,l)&&l.id===o)return r.push(l),r}else{if(f[2])return L.apply(r,t.getElementsByTagName(e)),r;if((o=f[3])&&n.getElementsByClassName&&t.getElementsByClassName)return L.apply(r,t.getElementsByClassName(o)),r}if(n.qsa&&!S[e+" "]&&(!y||!y.test(e))){if(1!==T)m=t,v=e;else if("object"!==t.nodeName.toLowerCase()){(c=t.getAttribute("id"))?c=c.replace(te,ne):t.setAttribute("id",c=b),s=(h=a(e)).length;while(s--)h[s]="#"+c+" "+ve(h[s]);v=h.join(","),m=K.test(e)&&ge(t.parentNode)||t}if(v)try{return L.apply(r,m.querySelectorAll(v)),r}catch(e){}finally{c===b&&t.removeAttribute("id")}}}return u(e.replace(B,"$1"),t,r,i)}function ae(){var e=[];function t(n,i){return e.push(n+" ")>r.cacheLength&&delete t[e.shift()],t[n+" "]=i}return t}function se(e){return e[b]=!0,e}function ue(e){var t=d.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function le(e,t){var n=e.split("|"),i=n.length;while(i--)r.attrHandle[n[i]]=t}function ce(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function fe(e){return function(t){return"input"===t.nodeName.toLowerCase()&&t.type===e}}function pe(e){return function(t){var n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function de(e){return function(t){return"form"in t?t.parentNode&&!1===t.disabled?"label"in t?"label"in t.parentNode?t.parentNode.disabled===e:t.disabled===e:t.isDisabled===e||t.isDisabled!==!e&&ie(t)===e:t.disabled===e:"label"in t&&t.disabled===e}}function he(e){return se(function(t){return t=+t,se(function(n,r){var i,o=e([],n.length,t),a=o.length;while(a--)n[i=o[a]]&&(n[i]=!(r[i]=n[i]))})})}function ge(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}n=oe.support={},o=oe.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return!!t&&"HTML"!==t.nodeName},p=oe.setDocument=function(e){var t,i,a=e?e.ownerDocument||e:w;return a!==d&&9===a.nodeType&&a.documentElement?(d=a,h=d.documentElement,g=!o(d),w!==d&&(i=d.defaultView)&&i.top!==i&&(i.addEventListener?i.addEventListener("unload",re,!1):i.attachEvent&&i.attachEvent("onunload",re)),n.attributes=ue(function(e){return e.className="i",!e.getAttribute("className")}),n.getElementsByTagName=ue(function(e){return e.appendChild(d.createComment("")),!e.getElementsByTagName("*").length}),n.getElementsByClassName=Q.test(d.getElementsByClassName),n.getById=ue(function(e){return h.appendChild(e).id=b,!d.getElementsByName||!d.getElementsByName(b).length}),n.getById?(r.filter.ID=function(e){var t=e.replace(Z,ee);return function(e){return e.getAttribute("id")===t}},r.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&g){var n=t.getElementById(e);return n?[n]:[]}}):(r.filter.ID=function(e){var t=e.replace(Z,ee);return function(e){var n="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return n&&n.value===t}},r.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&g){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),r.find.TAG=n.getElementsByTagName?function(e,t){return"undefined"!=typeof 
t.getElementsByTagName?t.getElementsByTagName(e):n.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},r.find.CLASS=n.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&g)return t.getElementsByClassName(e)},v=[],y=[],(n.qsa=Q.test(d.querySelectorAll))&&(ue(function(e){h.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&y.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||y.push("\\["+M+"*(?:value|"+P+")"),e.querySelectorAll("[id~="+b+"-]").length||y.push("~="),e.querySelectorAll(":checked").length||y.push(":checked"),e.querySelectorAll("a#"+b+"+*").length||y.push(".#.+[+~]")}),ue(function(e){e.innerHTML="";var t=d.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&y.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&y.push(":enabled",":disabled"),h.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&y.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),y.push(",.*:")})),(n.matchesSelector=Q.test(m=h.matches||h.webkitMatchesSelector||h.mozMatchesSelector||h.oMatchesSelector||h.msMatchesSelector))&&ue(function(e){n.disconnectedMatch=m.call(e,"*"),m.call(e,"[s!='']:x"),v.push("!=",W)}),y=y.length&&new RegExp(y.join("|")),v=v.length&&new RegExp(v.join("|")),t=Q.test(h.compareDocumentPosition),x=t||Q.test(h.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},D=t?function(e,t){if(e===t)return f=!0,0;var r=!e.compareDocumentPosition-!t.compareDocumentPosition;return r||(1&(r=(e.ownerDocument||e)===(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!n.sortDetached&&t.compareDocumentPosition(e)===r?e===d||e.ownerDocument===w&&x(w,e)?-1:t===d||t.ownerDocument===w&&x(w,t)?1:c?O(c,e)-O(c,t):0:4&r?-1:1)}:function(e,t){if(e===t)return f=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e===d?-1:t===d?1:i?-1:o?1:c?O(c,e)-O(c,t):0;if(i===o)return ce(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?ce(a[r],s[r]):a[r]===w?-1:s[r]===w?1:0},d):d},oe.matches=function(e,t){return oe(e,null,null,t)},oe.matchesSelector=function(e,t){if((e.ownerDocument||e)!==d&&p(e),t=t.replace(z,"='$1']"),n.matchesSelector&&g&&!S[t+" "]&&(!v||!v.test(t))&&(!y||!y.test(t)))try{var r=m.call(e,t);if(r||n.disconnectedMatch||e.document&&11!==e.document.nodeType)return r}catch(e){}return oe(t,d,null,[e]).length>0},oe.contains=function(e,t){return(e.ownerDocument||e)!==d&&p(e),x(e,t)},oe.attr=function(e,t){(e.ownerDocument||e)!==d&&p(e);var i=r.attrHandle[t.toLowerCase()],o=i&&N.call(r.attrHandle,t.toLowerCase())?i(e,t,!g):void 0;return void 0!==o?o:n.attributes||!g?e.getAttribute(t):(o=e.getAttributeNode(t))&&o.specified?o.value:null},oe.escape=function(e){return(e+"").replace(te,ne)},oe.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},oe.uniqueSort=function(e){var t,r=[],i=0,o=0;if(f=!n.detectDuplicates,c=!n.sortStable&&e.slice(0),e.sort(D),f){while(t=e[o++])t===e[o]&&(i=r.push(o));while(i--)e.splice(r[i],1)}return c=null,e},i=oe.getText=function(e){var 
t,n="",r=0,o=e.nodeType;if(o){if(1===o||9===o||11===o){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=i(e)}else if(3===o||4===o)return e.nodeValue}else while(t=e[r++])n+=i(t);return n},(r=oe.selectors={cacheLength:50,createPseudo:se,match:V,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(Z,ee),e[3]=(e[3]||e[4]||e[5]||"").replace(Z,ee),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||oe.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&oe.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return V.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=a(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(Z,ee).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=E[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&E(e,function(e){return t.test("string"==typeof e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var i=oe.attr(r,e);return null==i?"!="===t:!t||(i+="","="===t?i===n:"!="===t?i!==n:"^="===t?n&&0===i.indexOf(n):"*="===t?n&&i.indexOf(n)>-1:"$="===t?n&&i.slice(-n.length)===n:"~="===t?(" "+i.replace($," ")+" ").indexOf(n)>-1:"|="===t&&(i===n||i.slice(0,n.length+1)===n+"-"))}},CHILD:function(e,t,n,r,i){var o="nth"!==e.slice(0,3),a="last"!==e.slice(-4),s="of-type"===t;return 1===r&&0===i?function(e){return!!e.parentNode}:function(t,n,u){var l,c,f,p,d,h,g=o!==a?"nextSibling":"previousSibling",y=t.parentNode,v=s&&t.nodeName.toLowerCase(),m=!u&&!s,x=!1;if(y){if(o){while(g){p=t;while(p=p[g])if(s?p.nodeName.toLowerCase()===v:1===p.nodeType)return!1;h=g="only"===e&&!h&&"nextSibling"}return!0}if(h=[a?y.firstChild:y.lastChild],a&&m){x=(d=(l=(c=(f=(p=y)[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]||[])[0]===T&&l[1])&&l[2],p=d&&y.childNodes[d];while(p=++d&&p&&p[g]||(x=d=0)||h.pop())if(1===p.nodeType&&++x&&p===t){c[e]=[T,d,x];break}}else if(m&&(x=d=(l=(c=(f=(p=t)[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]||[])[0]===T&&l[1]),!1===x)while(p=++d&&p&&p[g]||(x=d=0)||h.pop())if((s?p.nodeName.toLowerCase()===v:1===p.nodeType)&&++x&&(m&&((c=(f=p[b]||(p[b]={}))[p.uniqueID]||(f[p.uniqueID]={}))[e]=[T,x]),p===t))break;return(x-=i)===r||x%r==0&&x/r>=0}}},PSEUDO:function(e,t){var n,i=r.pseudos[e]||r.setFilters[e.toLowerCase()]||oe.error("unsupported pseudo: "+e);return i[b]?i(t):i.length>1?(n=[e,e,"",t],r.setFilters.hasOwnProperty(e.toLowerCase())?se(function(e,n){var r,o=i(e,t),a=o.length;while(a--)e[r=O(e,o[a])]=!(n[r]=o[a])}):function(e){return i(e,0,n)}):i}},pseudos:{not:se(function(e){var t=[],n=[],r=s(e.replace(B,"$1"));return r[b]?se(function(e,t,n,i){var o,a=r(e,null,i,[]),s=e.length;while(s--)(o=a[s])&&(e[s]=!(t[s]=o))}):function(e,i,o){return t[0]=e,r(t,null,o,n),t[0]=null,!n.pop()}}),has:se(function(e){return function(t){return oe(e,t).length>0}}),contains:se(function(e){return e=e.replace(Z,ee),function(t){return(t.textContent||t.innerText||i(t)).indexOf(e)>-1}}),lang:se(function(e){return U.test(e||"")||oe.error("unsupported lang: 
"+e),e=e.replace(Z,ee).toLowerCase(),function(t){var n;do{if(n=g?t.lang:t.getAttribute("xml:lang")||t.getAttribute("lang"))return(n=n.toLowerCase())===e||0===n.indexOf(e+"-")}while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(t){var n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===h},focus:function(e){return e===d.activeElement&&(!d.hasFocus||d.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:de(!1),disabled:de(!0),checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,!0===e.selected},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeType<6)return!1;return!0},parent:function(e){return!r.pseudos.empty(e)},header:function(e){return Y.test(e.nodeName)},input:function(e){return G.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||"text"===t.toLowerCase())},first:he(function(){return[0]}),last:he(function(e,t){return[t-1]}),eq:he(function(e,t,n){return[n<0?n+t:n]}),even:he(function(e,t){for(var n=0;n=0;)e.push(r);return e}),gt:he(function(e,t,n){for(var r=n<0?n+t:n;++r1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function be(e,t,n){for(var r=0,i=t.length;r-1&&(o[l]=!(a[l]=f))}}else v=we(v===a?v.splice(h,v.length):v),i?i(null,a,v,u):L.apply(a,v)})}function Ce(e){for(var t,n,i,o=e.length,a=r.relative[e[0].type],s=a||r.relative[" "],u=a?1:0,c=me(function(e){return e===t},s,!0),f=me(function(e){return O(t,e)>-1},s,!0),p=[function(e,n,r){var i=!a&&(r||n!==l)||((t=n).nodeType?c(e,n,r):f(e,n,r));return t=null,i}];u1&&xe(p),u>1&&ve(e.slice(0,u-1).concat({value:" "===e[u-2].type?"*":""})).replace(B,"$1"),n,u0,i=e.length>0,o=function(o,a,s,u,c){var f,h,y,v=0,m="0",x=o&&[],b=[],w=l,C=o||i&&r.find.TAG("*",c),E=T+=null==w?1:Math.random()||.1,k=C.length;for(c&&(l=a===d||a||c);m!==k&&null!=(f=C[m]);m++){if(i&&f){h=0,a||f.ownerDocument===d||(p(f),s=!g);while(y=e[h++])if(y(f,a||d,s)){u.push(f);break}c&&(T=E)}n&&((f=!y&&f)&&v--,o&&x.push(f))}if(v+=m,n&&m!==v){h=0;while(y=t[h++])y(x,b,a,s);if(o){if(v>0)while(m--)x[m]||b[m]||(b[m]=j.call(u));b=we(b)}L.apply(u,b),c&&!o&&b.length>0&&v+t.length>1&&oe.uniqueSort(u)}return c&&(T=E,l=w),x};return n?se(o):o}return s=oe.compile=function(e,t){var n,r=[],i=[],o=S[e+" "];if(!o){t||(t=a(e)),n=t.length;while(n--)(o=Ce(t[n]))[b]?r.push(o):i.push(o);(o=S(e,Ee(i,r))).selector=e}return o},u=oe.select=function(e,t,n,i){var o,u,l,c,f,p="function"==typeof e&&e,d=!i&&a(e=p.selector||e);if(n=n||[],1===d.length){if((u=d[0]=d[0].slice(0)).length>2&&"ID"===(l=u[0]).type&&9===t.nodeType&&g&&r.relative[u[1].type]){if(!(t=(r.find.ID(l.matches[0].replace(Z,ee),t)||[])[0]))return n;p&&(t=t.parentNode),e=e.slice(u.shift().value.length)}o=V.needsContext.test(e)?0:u.length;while(o--){if(l=u[o],r.relative[c=l.type])break;if((f=r.find[c])&&(i=f(l.matches[0].replace(Z,ee),K.test(u[0].type)&&ge(t.parentNode)||t))){if(u.splice(o,1),!(e=i.length&&ve(u)))return L.apply(n,i),n;break}}}return(p||s(e,d))(i,t,!g,n,!t||K.test(e)&&ge(t.parentNode)||t),n},n.sortStable=b.split("").sort(D).join("")===b,n.detectDuplicates=!!f,p(),n.sortDetached=ue(function(e){return 1&e.compareDocumentPosition(d.createElement("fieldset"))}),ue(function(e){return 
e.innerHTML="","#"===e.firstChild.getAttribute("href")})||le("type|href|height|width",function(e,t,n){if(!n)return e.getAttribute(t,"type"===t.toLowerCase()?1:2)}),n.attributes&&ue(function(e){return e.innerHTML="",e.firstChild.setAttribute("value",""),""===e.firstChild.getAttribute("value")})||le("value",function(e,t,n){if(!n&&"input"===e.nodeName.toLowerCase())return e.defaultValue}),ue(function(e){return null==e.getAttribute("disabled")})||le(P,function(e,t,n){var r;if(!n)return!0===e[t]?t.toLowerCase():(r=e.getAttributeNode(t))&&r.specified?r.value:null}),oe}(e);w.find=E,w.expr=E.selectors,w.expr[":"]=w.expr.pseudos,w.uniqueSort=w.unique=E.uniqueSort,w.text=E.getText,w.isXMLDoc=E.isXML,w.contains=E.contains,w.escapeSelector=E.escape;var k=function(e,t,n){var r=[],i=void 0!==n;while((e=e[t])&&9!==e.nodeType)if(1===e.nodeType){if(i&&w(e).is(n))break;r.push(e)}return r},S=function(e,t){for(var n=[];e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n},D=w.expr.match.needsContext;function N(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()}var A=/^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,t,n){return g(t)?w.grep(e,function(e,r){return!!t.call(e,r,e)!==n}):t.nodeType?w.grep(e,function(e){return e===t!==n}):"string"!=typeof t?w.grep(e,function(e){return u.call(t,e)>-1!==n}):w.filter(t,e,n)}w.filter=function(e,t,n){var r=t[0];return n&&(e=":not("+e+")"),1===t.length&&1===r.nodeType?w.find.matchesSelector(r,e)?[r]:[]:w.find.matches(e,w.grep(t,function(e){return 1===e.nodeType}))},w.fn.extend({find:function(e){var t,n,r=this.length,i=this;if("string"!=typeof e)return this.pushStack(w(e).filter(function(){for(t=0;t1?w.uniqueSort(n):n},filter:function(e){return this.pushStack(j(this,e||[],!1))},not:function(e){return this.pushStack(j(this,e||[],!0))},is:function(e){return!!j(this,"string"==typeof e&&D.test(e)?w(e):e||[],!1).length}});var q,L=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/;(w.fn.init=function(e,t,n){var i,o;if(!e)return this;if(n=n||q,"string"==typeof e){if(!(i="<"===e[0]&&">"===e[e.length-1]&&e.length>=3?[null,e,null]:L.exec(e))||!i[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(i[1]){if(t=t instanceof w?t[0]:t,w.merge(this,w.parseHTML(i[1],t&&t.nodeType?t.ownerDocument||t:r,!0)),A.test(i[1])&&w.isPlainObject(t))for(i in t)g(this[i])?this[i](t[i]):this.attr(i,t[i]);return this}return(o=r.getElementById(i[2]))&&(this[0]=o,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):g(e)?void 0!==n.ready?n.ready(e):e(w):w.makeArray(e,this)}).prototype=w.fn,q=w(r);var H=/^(?:parents|prev(?:Until|All))/,O={children:!0,contents:!0,next:!0,prev:!0};w.fn.extend({has:function(e){var t=w(e,this),n=t.length;return this.filter(function(){for(var e=0;e-1:1===n.nodeType&&w.find.matchesSelector(n,e))){o.push(n);break}return this.pushStack(o.length>1?w.uniqueSort(o):o)},index:function(e){return e?"string"==typeof e?u.call(w(e),this[0]):u.call(this,e.jquery?e[0]:e):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){return this.pushStack(w.uniqueSort(w.merge(this.get(),w(e,t))))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}});function P(e,t){while((e=e[t])&&1!==e.nodeType);return e}w.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return k(e,"parentNode")},parentsUntil:function(e,t,n){return k(e,"parentNode",n)},next:function(e){return P(e,"nextSibling")},prev:function(e){return 
P(e,"previousSibling")},nextAll:function(e){return k(e,"nextSibling")},prevAll:function(e){return k(e,"previousSibling")},nextUntil:function(e,t,n){return k(e,"nextSibling",n)},prevUntil:function(e,t,n){return k(e,"previousSibling",n)},siblings:function(e){return S((e.parentNode||{}).firstChild,e)},children:function(e){return S(e.firstChild)},contents:function(e){return N(e,"iframe")?e.contentDocument:(N(e,"template")&&(e=e.content||e),w.merge([],e.childNodes))}},function(e,t){w.fn[e]=function(n,r){var i=w.map(this,t,n);return"Until"!==e.slice(-5)&&(r=n),r&&"string"==typeof r&&(i=w.filter(r,i)),this.length>1&&(O[e]||w.uniqueSort(i),H.test(e)&&i.reverse()),this.pushStack(i)}});var M=/[^\x20\t\r\n\f]+/g;function R(e){var t={};return w.each(e.match(M)||[],function(e,n){t[n]=!0}),t}w.Callbacks=function(e){e="string"==typeof e?R(e):w.extend({},e);var t,n,r,i,o=[],a=[],s=-1,u=function(){for(i=i||e.once,r=t=!0;a.length;s=-1){n=a.shift();while(++s-1)o.splice(n,1),n<=s&&s--}),this},has:function(e){return e?w.inArray(e,o)>-1:o.length>0},empty:function(){return o&&(o=[]),this},disable:function(){return i=a=[],o=n="",this},disabled:function(){return!o},lock:function(){return i=a=[],n||t||(o=n=""),this},locked:function(){return!!i},fireWith:function(e,n){return i||(n=[e,(n=n||[]).slice?n.slice():n],a.push(n),t||u()),this},fire:function(){return l.fireWith(this,arguments),this},fired:function(){return!!r}};return l};function I(e){return e}function W(e){throw e}function $(e,t,n,r){var i;try{e&&g(i=e.promise)?i.call(e).done(t).fail(n):e&&g(i=e.then)?i.call(e,t,n):t.apply(void 0,[e].slice(r))}catch(e){n.apply(void 0,[e])}}w.extend({Deferred:function(t){var n=[["notify","progress",w.Callbacks("memory"),w.Callbacks("memory"),2],["resolve","done",w.Callbacks("once memory"),w.Callbacks("once memory"),0,"resolved"],["reject","fail",w.Callbacks("once memory"),w.Callbacks("once memory"),1,"rejected"]],r="pending",i={state:function(){return r},always:function(){return o.done(arguments).fail(arguments),this},"catch":function(e){return i.then(null,e)},pipe:function(){var e=arguments;return w.Deferred(function(t){w.each(n,function(n,r){var i=g(e[r[4]])&&e[r[4]];o[r[1]](function(){var e=i&&i.apply(this,arguments);e&&g(e.promise)?e.promise().progress(t.notify).done(t.resolve).fail(t.reject):t[r[0]+"With"](this,i?[e]:arguments)})}),e=null}).promise()},then:function(t,r,i){var o=0;function a(t,n,r,i){return function(){var s=this,u=arguments,l=function(){var e,l;if(!(t=o&&(r!==W&&(s=void 0,u=[e]),n.rejectWith(s,u))}};t?c():(w.Deferred.getStackHook&&(c.stackTrace=w.Deferred.getStackHook()),e.setTimeout(c))}}return w.Deferred(function(e){n[0][3].add(a(0,e,g(i)?i:I,e.notifyWith)),n[1][3].add(a(0,e,g(t)?t:I)),n[2][3].add(a(0,e,g(r)?r:W))}).promise()},promise:function(e){return null!=e?w.extend(e,i):i}},o={};return w.each(n,function(e,t){var a=t[2],s=t[5];i[t[1]]=a.add,s&&a.add(function(){r=s},n[3-e][2].disable,n[3-e][3].disable,n[0][2].lock,n[0][3].lock),a.add(t[3].fire),o[t[0]]=function(){return o[t[0]+"With"](this===o?void 0:this,arguments),this},o[t[0]+"With"]=a.fireWith}),i.promise(o),t&&t.call(o,o),o},when:function(e){var t=arguments.length,n=t,r=Array(n),i=o.call(arguments),a=w.Deferred(),s=function(e){return function(n){r[e]=this,i[e]=arguments.length>1?o.call(arguments):n,--t||a.resolveWith(r,i)}};if(t<=1&&($(e,a.done(s(n)).resolve,a.reject,!t),"pending"===a.state()||g(i[n]&&i[n].then)))return a.then();while(n--)$(i[n],s(n),a.reject);return a.promise()}});var 
B=/^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/;w.Deferred.exceptionHook=function(t,n){e.console&&e.console.warn&&t&&B.test(t.name)&&e.console.warn("jQuery.Deferred exception: "+t.message,t.stack,n)},w.readyException=function(t){e.setTimeout(function(){throw t})};var F=w.Deferred();w.fn.ready=function(e){return F.then(e)["catch"](function(e){w.readyException(e)}),this},w.extend({isReady:!1,readyWait:1,ready:function(e){(!0===e?--w.readyWait:w.isReady)||(w.isReady=!0,!0!==e&&--w.readyWait>0||F.resolveWith(r,[w]))}}),w.ready.then=F.then;function _(){r.removeEventListener("DOMContentLoaded",_),e.removeEventListener("load",_),w.ready()}"complete"===r.readyState||"loading"!==r.readyState&&!r.documentElement.doScroll?e.setTimeout(w.ready):(r.addEventListener("DOMContentLoaded",_),e.addEventListener("load",_));var z=function(e,t,n,r,i,o,a){var s=0,u=e.length,l=null==n;if("object"===x(n)){i=!0;for(s in n)z(e,t,s,n[s],!0,o,a)}else if(void 0!==r&&(i=!0,g(r)||(a=!0),l&&(a?(t.call(e,r),t=null):(l=t,t=function(e,t,n){return l.call(w(e),n)})),t))for(;s1,null,!0)},removeData:function(e){return this.each(function(){K.remove(this,e)})}}),w.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=J.get(e,t),n&&(!r||Array.isArray(n)?r=J.access(e,t,w.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=w.queue(e,t),r=n.length,i=n.shift(),o=w._queueHooks(e,t),a=function(){w.dequeue(e,t)};"inprogress"===i&&(i=n.shift(),r--),i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,a,o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return J.get(e,n)||J.access(e,n,{empty:w.Callbacks("once memory").add(function(){J.remove(e,[t+"queue",n])})})}}),w.fn.extend({queue:function(e,t){var n=2;return"string"!=typeof e&&(t=e,e="fx",n--),arguments.length\x20\t\r\n\f]+)/i,he=/^$|^module$|\/(?:java|ecma)script/i,ge={option:[1,""],thead:[1,"","
    "],col:[2,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],_default:[0,"",""]};ge.optgroup=ge.option,ge.tbody=ge.tfoot=ge.colgroup=ge.caption=ge.thead,ge.th=ge.td;function ye(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&N(e,t)?w.merge([e],n):n}function ve(e,t){for(var n=0,r=e.length;n-1)i&&i.push(o);else if(l=w.contains(o.ownerDocument,o),a=ye(f.appendChild(o),"script"),l&&ve(a),n){c=0;while(o=a[c++])he.test(o.type||"")&&n.push(o)}return f}!function(){var e=r.createDocumentFragment().appendChild(r.createElement("div")),t=r.createElement("input");t.setAttribute("type","radio"),t.setAttribute("checked","checked"),t.setAttribute("name","t"),e.appendChild(t),h.checkClone=e.cloneNode(!0).cloneNode(!0).lastChild.checked,e.innerHTML="",h.noCloneChecked=!!e.cloneNode(!0).lastChild.defaultValue}();var be=r.documentElement,we=/^key/,Te=/^(?:mouse|pointer|contextmenu|drag|drop)|click/,Ce=/^([^.]*)(?:\.(.+)|)/;function Ee(){return!0}function ke(){return!1}function Se(){try{return r.activeElement}catch(e){}}function De(e,t,n,r,i,o){var a,s;if("object"==typeof t){"string"!=typeof n&&(r=r||n,n=void 0);for(s in t)De(e,s,n,r,t[s],o);return e}if(null==r&&null==i?(i=n,r=n=void 0):null==i&&("string"==typeof n?(i=r,r=void 0):(i=r,r=n,n=void 0)),!1===i)i=ke;else if(!i)return e;return 1===o&&(a=i,(i=function(e){return w().off(e),a.apply(this,arguments)}).guid=a.guid||(a.guid=w.guid++)),e.each(function(){w.event.add(this,t,i,r,n)})}w.event={global:{},add:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,y=J.get(e);if(y){n.handler&&(n=(o=n).handler,i=o.selector),i&&w.find.matchesSelector(be,i),n.guid||(n.guid=w.guid++),(u=y.events)||(u=y.events={}),(a=y.handle)||(a=y.handle=function(t){return"undefined"!=typeof w&&w.event.triggered!==t.type?w.event.dispatch.apply(e,arguments):void 0}),l=(t=(t||"").match(M)||[""]).length;while(l--)d=g=(s=Ce.exec(t[l])||[])[1],h=(s[2]||"").split(".").sort(),d&&(f=w.event.special[d]||{},d=(i?f.delegateType:f.bindType)||d,f=w.event.special[d]||{},c=w.extend({type:d,origType:g,data:r,handler:n,guid:n.guid,selector:i,needsContext:i&&w.expr.match.needsContext.test(i),namespace:h.join(".")},o),(p=u[d])||((p=u[d]=[]).delegateCount=0,f.setup&&!1!==f.setup.call(e,r,h,a)||e.addEventListener&&e.addEventListener(d,a)),f.add&&(f.add.call(e,c),c.handler.guid||(c.handler.guid=n.guid)),i?p.splice(p.delegateCount++,0,c):p.push(c),w.event.global[d]=!0)}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,f,p,d,h,g,y=J.hasData(e)&&J.get(e);if(y&&(u=y.events)){l=(t=(t||"").match(M)||[""]).length;while(l--)if(s=Ce.exec(t[l])||[],d=g=s[1],h=(s[2]||"").split(".").sort(),d){f=w.event.special[d]||{},p=u[d=(r?f.delegateType:f.bindType)||d]||[],s=s[2]&&new RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),a=o=p.length;while(o--)c=p[o],!i&&g!==c.origType||n&&n.guid!==c.guid||s&&!s.test(c.namespace)||r&&r!==c.selector&&("**"!==r||!c.selector)||(p.splice(o,1),c.selector&&p.delegateCount--,f.remove&&f.remove.call(e,c));a&&!p.length&&(f.teardown&&!1!==f.teardown.call(e,h,y.handle)||w.removeEvent(e,d,y.handle),delete u[d])}else for(d in u)w.event.remove(e,d+t[l],n,r,!0);w.isEmptyObject(u)&&J.remove(e,"handle events")}},dispatch:function(e){var t=w.event.fix(e),n,r,i,o,a,s,u=new 
Array(arguments.length),l=(J.get(this,"events")||{})[t.type]||[],c=w.event.special[t.type]||{};for(u[0]=t,n=1;n=1))for(;l!==this;l=l.parentNode||this)if(1===l.nodeType&&("click"!==e.type||!0!==l.disabled)){for(o=[],a={},n=0;n-1:w.find(i,this,null,[l]).length),a[i]&&o.push(r);o.length&&s.push({elem:l,handlers:o})}return l=this,u\x20\t\r\n\f]*)[^>]*)\/>/gi,Ae=/\s*$/g;function Le(e,t){return N(e,"table")&&N(11!==t.nodeType?t:t.firstChild,"tr")?w(e).children("tbody")[0]||e:e}function He(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function Oe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Pe(e,t){var n,r,i,o,a,s,u,l;if(1===t.nodeType){if(J.hasData(e)&&(o=J.access(e),a=J.set(t,o),l=o.events)){delete a.handle,a.events={};for(i in l)for(n=0,r=l[i].length;n1&&"string"==typeof y&&!h.checkClone&&je.test(y))return e.each(function(i){var o=e.eq(i);v&&(t[0]=y.call(this,i,o.html())),Re(o,t,n,r)});if(p&&(i=xe(t,e[0].ownerDocument,!1,e,r),o=i.firstChild,1===i.childNodes.length&&(i=o),o||r)){for(u=(s=w.map(ye(i,"script"),He)).length;f")},clone:function(e,t,n){var r,i,o,a,s=e.cloneNode(!0),u=w.contains(e.ownerDocument,e);if(!(h.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||w.isXMLDoc(e)))for(a=ye(s),r=0,i=(o=ye(e)).length;r0&&ve(a,!u&&ye(e,"script")),s},cleanData:function(e){for(var t,n,r,i=w.event.special,o=0;void 0!==(n=e[o]);o++)if(Y(n)){if(t=n[J.expando]){if(t.events)for(r in t.events)i[r]?w.event.remove(n,r):w.removeEvent(n,r,t.handle);n[J.expando]=void 0}n[K.expando]&&(n[K.expando]=void 0)}}}),w.fn.extend({detach:function(e){return Ie(this,e,!0)},remove:function(e){return Ie(this,e)},text:function(e){return z(this,function(e){return void 0===e?w.text(this):this.empty().each(function(){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||(this.textContent=e)})},null,e,arguments.length)},append:function(){return Re(this,arguments,function(e){1!==this.nodeType&&11!==this.nodeType&&9!==this.nodeType||Le(this,e).appendChild(e)})},prepend:function(){return Re(this,arguments,function(e){if(1===this.nodeType||11===this.nodeType||9===this.nodeType){var t=Le(this,e);t.insertBefore(e,t.firstChild)}})},before:function(){return Re(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return Re(this,arguments,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},empty:function(){for(var e,t=0;null!=(e=this[t]);t++)1===e.nodeType&&(w.cleanData(ye(e,!1)),e.textContent="");return this},clone:function(e,t){return e=null!=e&&e,t=null==t?e:t,this.map(function(){return w.clone(this,e,t)})},html:function(e){return z(this,function(e){var t=this[0]||{},n=0,r=this.length;if(void 0===e&&1===t.nodeType)return t.innerHTML;if("string"==typeof e&&!Ae.test(e)&&!ge[(de.exec(e)||["",""])[1].toLowerCase()]){e=w.htmlPrefilter(e);try{for(;n=0&&(u+=Math.max(0,Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-o-u-s-.5))),u}function et(e,t,n){var r=$e(e),i=Fe(e,t,r),o="border-box"===w.css(e,"boxSizing",!1,r),a=o;if(We.test(i)){if(!n)return i;i="auto"}return a=a&&(h.boxSizingReliable()||i===e.style[t]),("auto"===i||!parseFloat(i)&&"inline"===w.css(e,"display",!1,r))&&(i=e["offset"+t[0].toUpperCase()+t.slice(1)],a=!0),(i=parseFloat(i)||0)+Ze(e,t,n||(o?"border":"content"),a,r,i)+"px"}w.extend({cssHooks:{opacity:{get:function(e,t){if(t){var 
n=Fe(e,"opacity");return""===n?"1":n}}}},cssNumber:{animationIterationCount:!0,columnCount:!0,fillOpacity:!0,flexGrow:!0,flexShrink:!0,fontWeight:!0,lineHeight:!0,opacity:!0,order:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{},style:function(e,t,n,r){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var i,o,a,s=G(t),u=Xe.test(t),l=e.style;if(u||(t=Je(s)),a=w.cssHooks[t]||w.cssHooks[s],void 0===n)return a&&"get"in a&&void 0!==(i=a.get(e,!1,r))?i:l[t];"string"==(o=typeof n)&&(i=ie.exec(n))&&i[1]&&(n=ue(e,t,i),o="number"),null!=n&&n===n&&("number"===o&&(n+=i&&i[3]||(w.cssNumber[s]?"":"px")),h.clearCloneStyle||""!==n||0!==t.indexOf("background")||(l[t]="inherit"),a&&"set"in a&&void 0===(n=a.set(e,n,r))||(u?l.setProperty(t,n):l[t]=n))}},css:function(e,t,n,r){var i,o,a,s=G(t);return Xe.test(t)||(t=Je(s)),(a=w.cssHooks[t]||w.cssHooks[s])&&"get"in a&&(i=a.get(e,!0,n)),void 0===i&&(i=Fe(e,t,r)),"normal"===i&&t in Ve&&(i=Ve[t]),""===n||n?(o=parseFloat(i),!0===n||isFinite(o)?o||0:i):i}}),w.each(["height","width"],function(e,t){w.cssHooks[t]={get:function(e,n,r){if(n)return!ze.test(w.css(e,"display"))||e.getClientRects().length&&e.getBoundingClientRect().width?et(e,t,r):se(e,Ue,function(){return et(e,t,r)})},set:function(e,n,r){var i,o=$e(e),a="border-box"===w.css(e,"boxSizing",!1,o),s=r&&Ze(e,t,r,a,o);return a&&h.scrollboxSize()===o.position&&(s-=Math.ceil(e["offset"+t[0].toUpperCase()+t.slice(1)]-parseFloat(o[t])-Ze(e,t,"border",!1,o)-.5)),s&&(i=ie.exec(n))&&"px"!==(i[3]||"px")&&(e.style[t]=n,n=w.css(e,t)),Ke(e,n,s)}}}),w.cssHooks.marginLeft=_e(h.reliableMarginLeft,function(e,t){if(t)return(parseFloat(Fe(e,"marginLeft"))||e.getBoundingClientRect().left-se(e,{marginLeft:0},function(){return e.getBoundingClientRect().left}))+"px"}),w.each({margin:"",padding:"",border:"Width"},function(e,t){w.cssHooks[e+t]={expand:function(n){for(var r=0,i={},o="string"==typeof n?n.split(" "):[n];r<4;r++)i[e+oe[r]+t]=o[r]||o[r-2]||o[0];return i}},"margin"!==e&&(w.cssHooks[e+t].set=Ke)}),w.fn.extend({css:function(e,t){return z(this,function(e,t,n){var r,i,o={},a=0;if(Array.isArray(t)){for(r=$e(e),i=t.length;a1)}});function tt(e,t,n,r,i){return new tt.prototype.init(e,t,n,r,i)}w.Tween=tt,tt.prototype={constructor:tt,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||w.easing._default,this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(w.cssNumber[n]?"":"px")},cur:function(){var e=tt.propHooks[this.prop];return e&&e.get?e.get(this):tt.propHooks._default.get(this)},run:function(e){var t,n=tt.propHooks[this.prop];return this.options.duration?this.pos=t=w.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):tt.propHooks._default.set(this),this}},tt.prototype.init.prototype=tt.prototype,tt.propHooks={_default:{get:function(e){var t;return 1!==e.elem.nodeType||null!=e.elem[e.prop]&&null==e.elem.style[e.prop]?e.elem[e.prop]:(t=w.css(e.elem,e.prop,""))&&"auto"!==t?t:0},set:function(e){w.fx.step[e.prop]?w.fx.step[e.prop](e):1!==e.elem.nodeType||null==e.elem.style[w.cssProps[e.prop]]&&!w.cssHooks[e.prop]?e.elem[e.prop]=e.now:w.style(e.elem,e.prop,e.now+e.unit)}}},tt.propHooks.scrollTop=tt.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},w.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2},_default:"swing"},w.fx=tt.prototype.init,w.fx.step={};var 
nt,rt,it=/^(?:toggle|show|hide)$/,ot=/queueHooks$/;function at(){rt&&(!1===r.hidden&&e.requestAnimationFrame?e.requestAnimationFrame(at):e.setTimeout(at,w.fx.interval),w.fx.tick())}function st(){return e.setTimeout(function(){nt=void 0}),nt=Date.now()}function ut(e,t){var n,r=0,i={height:e};for(t=t?1:0;r<4;r+=2-t)i["margin"+(n=oe[r])]=i["padding"+n]=e;return t&&(i.opacity=i.width=e),i}function lt(e,t,n){for(var r,i=(pt.tweeners[t]||[]).concat(pt.tweeners["*"]),o=0,a=i.length;o1)},removeAttr:function(e){return this.each(function(){w.removeAttr(this,e)})}}),w.extend({attr:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return"undefined"==typeof e.getAttribute?w.prop(e,t,n):(1===o&&w.isXMLDoc(e)||(i=w.attrHooks[t.toLowerCase()]||(w.expr.match.bool.test(t)?dt:void 0)),void 0!==n?null===n?void w.removeAttr(e,t):i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:(e.setAttribute(t,n+""),n):i&&"get"in i&&null!==(r=i.get(e,t))?r:null==(r=w.find.attr(e,t))?void 0:r)},attrHooks:{type:{set:function(e,t){if(!h.radioValue&&"radio"===t&&N(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},removeAttr:function(e,t){var n,r=0,i=t&&t.match(M);if(i&&1===e.nodeType)while(n=i[r++])e.removeAttribute(n)}}),dt={set:function(e,t,n){return!1===t?w.removeAttr(e,n):e.setAttribute(n,n),n}},w.each(w.expr.match.bool.source.match(/\w+/g),function(e,t){var n=ht[t]||w.find.attr;ht[t]=function(e,t,r){var i,o,a=t.toLowerCase();return r||(o=ht[a],ht[a]=i,i=null!=n(e,t,r)?a:null,ht[a]=o),i}});var gt=/^(?:input|select|textarea|button)$/i,yt=/^(?:a|area)$/i;w.fn.extend({prop:function(e,t){return z(this,w.prop,e,t,arguments.length>1)},removeProp:function(e){return this.each(function(){delete this[w.propFix[e]||e]})}}),w.extend({prop:function(e,t,n){var r,i,o=e.nodeType;if(3!==o&&8!==o&&2!==o)return 1===o&&w.isXMLDoc(e)||(t=w.propFix[t]||t,i=w.propHooks[t]),void 0!==n?i&&"set"in i&&void 0!==(r=i.set(e,n,t))?r:e[t]=n:i&&"get"in i&&null!==(r=i.get(e,t))?r:e[t]},propHooks:{tabIndex:{get:function(e){var t=w.find.attr(e,"tabindex");return t?parseInt(t,10):gt.test(e.nodeName)||yt.test(e.nodeName)&&e.href?0:-1}}},propFix:{"for":"htmlFor","class":"className"}}),h.optSelected||(w.propHooks.selected={get:function(e){var t=e.parentNode;return t&&t.parentNode&&t.parentNode.selectedIndex,null},set:function(e){var t=e.parentNode;t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex)}}),w.each(["tabIndex","readOnly","maxLength","cellSpacing","cellPadding","rowSpan","colSpan","useMap","frameBorder","contentEditable"],function(){w.propFix[this.toLowerCase()]=this});function vt(e){return(e.match(M)||[]).join(" ")}function mt(e){return e.getAttribute&&e.getAttribute("class")||""}function xt(e){return Array.isArray(e)?e:"string"==typeof e?e.match(M)||[]:[]}w.fn.extend({addClass:function(e){var t,n,r,i,o,a,s,u=0;if(g(e))return this.each(function(t){w(this).addClass(e.call(this,t,mt(this)))});if((t=xt(e)).length)while(n=this[u++])if(i=mt(n),r=1===n.nodeType&&" "+vt(i)+" "){a=0;while(o=t[a++])r.indexOf(" "+o+" ")<0&&(r+=o+" ");i!==(s=vt(r))&&n.setAttribute("class",s)}return this},removeClass:function(e){var t,n,r,i,o,a,s,u=0;if(g(e))return this.each(function(t){w(this).removeClass(e.call(this,t,mt(this)))});if(!arguments.length)return this.attr("class","");if((t=xt(e)).length)while(n=this[u++])if(i=mt(n),r=1===n.nodeType&&" "+vt(i)+" "){a=0;while(o=t[a++])while(r.indexOf(" "+o+" ")>-1)r=r.replace(" "+o+" "," ");i!==(s=vt(r))&&n.setAttribute("class",s)}return this},toggleClass:function(e,t){var n=typeof 
e,r="string"===n||Array.isArray(e);return"boolean"==typeof t&&r?t?this.addClass(e):this.removeClass(e):g(e)?this.each(function(n){w(this).toggleClass(e.call(this,n,mt(this),t),t)}):this.each(function(){var t,i,o,a;if(r){i=0,o=w(this),a=xt(e);while(t=a[i++])o.hasClass(t)?o.removeClass(t):o.addClass(t)}else void 0!==e&&"boolean"!==n||((t=mt(this))&&J.set(this,"__className__",t),this.setAttribute&&this.setAttribute("class",t||!1===e?"":J.get(this,"__className__")||""))})},hasClass:function(e){var t,n,r=0;t=" "+e+" ";while(n=this[r++])if(1===n.nodeType&&(" "+vt(mt(n))+" ").indexOf(t)>-1)return!0;return!1}});var bt=/\r/g;w.fn.extend({val:function(e){var t,n,r,i=this[0];{if(arguments.length)return r=g(e),this.each(function(n){var i;1===this.nodeType&&(null==(i=r?e.call(this,n,w(this).val()):e)?i="":"number"==typeof i?i+="":Array.isArray(i)&&(i=w.map(i,function(e){return null==e?"":e+""})),(t=w.valHooks[this.type]||w.valHooks[this.nodeName.toLowerCase()])&&"set"in t&&void 0!==t.set(this,i,"value")||(this.value=i))});if(i)return(t=w.valHooks[i.type]||w.valHooks[i.nodeName.toLowerCase()])&&"get"in t&&void 0!==(n=t.get(i,"value"))?n:"string"==typeof(n=i.value)?n.replace(bt,""):null==n?"":n}}}),w.extend({valHooks:{option:{get:function(e){var t=w.find.attr(e,"value");return null!=t?t:vt(w.text(e))}},select:{get:function(e){var t,n,r,i=e.options,o=e.selectedIndex,a="select-one"===e.type,s=a?null:[],u=a?o+1:i.length;for(r=o<0?u:a?o:0;r-1)&&(n=!0);return n||(e.selectedIndex=-1),o}}}}),w.each(["radio","checkbox"],function(){w.valHooks[this]={set:function(e,t){if(Array.isArray(t))return e.checked=w.inArray(w(e).val(),t)>-1}},h.checkOn||(w.valHooks[this].get=function(e){return null===e.getAttribute("value")?"on":e.value})}),h.focusin="onfocusin"in e;var wt=/^(?:focusinfocus|focusoutblur)$/,Tt=function(e){e.stopPropagation()};w.extend(w.event,{trigger:function(t,n,i,o){var a,s,u,l,c,p,d,h,v=[i||r],m=f.call(t,"type")?t.type:t,x=f.call(t,"namespace")?t.namespace.split("."):[];if(s=h=u=i=i||r,3!==i.nodeType&&8!==i.nodeType&&!wt.test(m+w.event.triggered)&&(m.indexOf(".")>-1&&(m=(x=m.split(".")).shift(),x.sort()),c=m.indexOf(":")<0&&"on"+m,t=t[w.expando]?t:new w.Event(m,"object"==typeof t&&t),t.isTrigger=o?2:3,t.namespace=x.join("."),t.rnamespace=t.namespace?new RegExp("(^|\\.)"+x.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,t.result=void 0,t.target||(t.target=i),n=null==n?[t]:w.makeArray(n,[t]),d=w.event.special[m]||{},o||!d.trigger||!1!==d.trigger.apply(i,n))){if(!o&&!d.noBubble&&!y(i)){for(l=d.delegateType||m,wt.test(l+m)||(s=s.parentNode);s;s=s.parentNode)v.push(s),u=s;u===(i.ownerDocument||r)&&v.push(u.defaultView||u.parentWindow||e)}a=0;while((s=v[a++])&&!t.isPropagationStopped())h=s,t.type=a>1?l:d.bindType||m,(p=(J.get(s,"events")||{})[t.type]&&J.get(s,"handle"))&&p.apply(s,n),(p=c&&s[c])&&p.apply&&Y(s)&&(t.result=p.apply(s,n),!1===t.result&&t.preventDefault());return t.type=m,o||t.isDefaultPrevented()||d._default&&!1!==d._default.apply(v.pop(),n)||!Y(i)||c&&g(i[m])&&!y(i)&&((u=i[c])&&(i[c]=null),w.event.triggered=m,t.isPropagationStopped()&&h.addEventListener(m,Tt),i[m](),t.isPropagationStopped()&&h.removeEventListener(m,Tt),w.event.triggered=void 0,u&&(i[c]=u)),t.result}},simulate:function(e,t,n){var r=w.extend(new w.Event,n,{type:e,isSimulated:!0});w.event.trigger(r,null,t)}}),w.fn.extend({trigger:function(e,t){return this.each(function(){w.event.trigger(e,t,this)})},triggerHandler:function(e,t){var n=this[0];if(n)return 
w.event.trigger(e,t,n,!0)}}),h.focusin||w.each({focus:"focusin",blur:"focusout"},function(e,t){var n=function(e){w.event.simulate(t,e.target,w.event.fix(e))};w.event.special[t]={setup:function(){var r=this.ownerDocument||this,i=J.access(r,t);i||r.addEventListener(e,n,!0),J.access(r,t,(i||0)+1)},teardown:function(){var r=this.ownerDocument||this,i=J.access(r,t)-1;i?J.access(r,t,i):(r.removeEventListener(e,n,!0),J.remove(r,t))}}});var Ct=e.location,Et=Date.now(),kt=/\?/;w.parseXML=function(t){var n;if(!t||"string"!=typeof t)return null;try{n=(new e.DOMParser).parseFromString(t,"text/xml")}catch(e){n=void 0}return n&&!n.getElementsByTagName("parsererror").length||w.error("Invalid XML: "+t),n};var St=/\[\]$/,Dt=/\r?\n/g,Nt=/^(?:submit|button|image|reset|file)$/i,At=/^(?:input|select|textarea|keygen)/i;function jt(e,t,n,r){var i;if(Array.isArray(t))w.each(t,function(t,i){n||St.test(e)?r(e,i):jt(e+"["+("object"==typeof i&&null!=i?t:"")+"]",i,n,r)});else if(n||"object"!==x(t))r(e,t);else for(i in t)jt(e+"["+i+"]",t[i],n,r)}w.param=function(e,t){var n,r=[],i=function(e,t){var n=g(t)?t():t;r[r.length]=encodeURIComponent(e)+"="+encodeURIComponent(null==n?"":n)};if(Array.isArray(e)||e.jquery&&!w.isPlainObject(e))w.each(e,function(){i(this.name,this.value)});else for(n in e)jt(n,e[n],t,i);return r.join("&")},w.fn.extend({serialize:function(){return w.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=w.prop(this,"elements");return e?w.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!w(this).is(":disabled")&&At.test(this.nodeName)&&!Nt.test(e)&&(this.checked||!pe.test(e))}).map(function(e,t){var n=w(this).val();return null==n?null:Array.isArray(n)?w.map(n,function(e){return{name:t.name,value:e.replace(Dt,"\r\n")}}):{name:t.name,value:n.replace(Dt,"\r\n")}}).get()}});var qt=/%20/g,Lt=/#.*$/,Ht=/([?&])_=[^&]*/,Ot=/^(.*?):[ \t]*([^\r\n]*)$/gm,Pt=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Mt=/^(?:GET|HEAD)$/,Rt=/^\/\//,It={},Wt={},$t="*/".concat("*"),Bt=r.createElement("a");Bt.href=Ct.href;function Ft(e){return function(t,n){"string"!=typeof t&&(n=t,t="*");var r,i=0,o=t.toLowerCase().match(M)||[];if(g(n))while(r=o[i++])"+"===r[0]?(r=r.slice(1)||"*",(e[r]=e[r]||[]).unshift(n)):(e[r]=e[r]||[]).push(n)}}function _t(e,t,n,r){var i={},o=e===Wt;function a(s){var u;return i[s]=!0,w.each(e[s]||[],function(e,s){var l=s(t,n,r);return"string"!=typeof l||o||i[l]?o?!(u=l):void 0:(t.dataTypes.unshift(l),a(l),!1)}),u}return a(t.dataTypes[0])||!i["*"]&&a("*")}function zt(e,t){var n,r,i=w.ajaxSettings.flatOptions||{};for(n in t)void 0!==t[n]&&((i[n]?e:r||(r={}))[n]=t[n]);return r&&w.extend(!0,e,r),e}function Xt(e,t,n){var r,i,o,a,s=e.contents,u=e.dataTypes;while("*"===u[0])u.shift(),void 0===r&&(r=e.mimeType||t.getResponseHeader("Content-Type"));if(r)for(i in s)if(s[i]&&s[i].test(r)){u.unshift(i);break}if(u[0]in n)o=u[0];else{for(i in n){if(!u[0]||e.converters[i+" "+u[0]]){o=i;break}a||(a=i)}o=o||a}if(o)return o!==u[0]&&u.unshift(o),n[o]}function Ut(e,t,n,r){var i,o,a,s,u,l={},c=e.dataTypes.slice();if(c[1])for(a in e.converters)l[a.toLowerCase()]=e.converters[a];o=c.shift();while(o)if(e.responseFields[o]&&(n[e.responseFields[o]]=t),!u&&r&&e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u=o,o=c.shift())if("*"===o)o=u;else if("*"!==u&&u!==o){if(!(a=l[u+" "+o]||l["* "+o]))for(i in l)if((s=i.split(" "))[1]===o&&(a=l[u+" "+s[0]]||l["* "+s[0]])){!0===a?a=l[i]:!0!==l[i]&&(o=s[0],c.unshift(s[1]));break}if(!0!==a)if(a&&e["throws"])t=a(t);else 
try{t=a(t)}catch(e){return{state:"parsererror",error:a?e:"No conversion from "+u+" to "+o}}}return{state:"success",data:t}}w.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:Ct.href,type:"GET",isLocal:Pt.test(Ct.protocol),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":$t,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/\bxml\b/,html:/\bhtml/,json:/\bjson\b/},responseFields:{xml:"responseXML",text:"responseText",json:"responseJSON"},converters:{"* text":String,"text html":!0,"text json":JSON.parse,"text xml":w.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?zt(zt(e,w.ajaxSettings),t):zt(w.ajaxSettings,e)},ajaxPrefilter:Ft(It),ajaxTransport:Ft(Wt),ajax:function(t,n){"object"==typeof t&&(n=t,t=void 0),n=n||{};var i,o,a,s,u,l,c,f,p,d,h=w.ajaxSetup({},n),g=h.context||h,y=h.context&&(g.nodeType||g.jquery)?w(g):w.event,v=w.Deferred(),m=w.Callbacks("once memory"),x=h.statusCode||{},b={},T={},C="canceled",E={readyState:0,getResponseHeader:function(e){var t;if(c){if(!s){s={};while(t=Ot.exec(a))s[t[1].toLowerCase()]=t[2]}t=s[e.toLowerCase()]}return null==t?null:t},getAllResponseHeaders:function(){return c?a:null},setRequestHeader:function(e,t){return null==c&&(e=T[e.toLowerCase()]=T[e.toLowerCase()]||e,b[e]=t),this},overrideMimeType:function(e){return null==c&&(h.mimeType=e),this},statusCode:function(e){var t;if(e)if(c)E.always(e[E.status]);else for(t in e)x[t]=[x[t],e[t]];return this},abort:function(e){var t=e||C;return i&&i.abort(t),k(0,t),this}};if(v.promise(E),h.url=((t||h.url||Ct.href)+"").replace(Rt,Ct.protocol+"//"),h.type=n.method||n.type||h.method||h.type,h.dataTypes=(h.dataType||"*").toLowerCase().match(M)||[""],null==h.crossDomain){l=r.createElement("a");try{l.href=h.url,l.href=l.href,h.crossDomain=Bt.protocol+"//"+Bt.host!=l.protocol+"//"+l.host}catch(e){h.crossDomain=!0}}if(h.data&&h.processData&&"string"!=typeof h.data&&(h.data=w.param(h.data,h.traditional)),_t(It,h,n,E),c)return E;(f=w.event&&h.global)&&0==w.active++&&w.event.trigger("ajaxStart"),h.type=h.type.toUpperCase(),h.hasContent=!Mt.test(h.type),o=h.url.replace(Lt,""),h.hasContent?h.data&&h.processData&&0===(h.contentType||"").indexOf("application/x-www-form-urlencoded")&&(h.data=h.data.replace(qt,"+")):(d=h.url.slice(o.length),h.data&&(h.processData||"string"==typeof h.data)&&(o+=(kt.test(o)?"&":"?")+h.data,delete h.data),!1===h.cache&&(o=o.replace(Ht,"$1"),d=(kt.test(o)?"&":"?")+"_="+Et+++d),h.url=o+d),h.ifModified&&(w.lastModified[o]&&E.setRequestHeader("If-Modified-Since",w.lastModified[o]),w.etag[o]&&E.setRequestHeader("If-None-Match",w.etag[o])),(h.data&&h.hasContent&&!1!==h.contentType||n.contentType)&&E.setRequestHeader("Content-Type",h.contentType),E.setRequestHeader("Accept",h.dataTypes[0]&&h.accepts[h.dataTypes[0]]?h.accepts[h.dataTypes[0]]+("*"!==h.dataTypes[0]?", "+$t+"; q=0.01":""):h.accepts["*"]);for(p in h.headers)E.setRequestHeader(p,h.headers[p]);if(h.beforeSend&&(!1===h.beforeSend.call(g,E,h)||c))return E.abort();if(C="abort",m.add(h.complete),E.done(h.success),E.fail(h.error),i=_t(Wt,h,n,E)){if(E.readyState=1,f&&y.trigger("ajaxSend",[E,h]),c)return E;h.async&&h.timeout>0&&(u=e.setTimeout(function(){E.abort("timeout")},h.timeout));try{c=!1,i.send(b,k)}catch(e){if(c)throw e;k(-1,e)}}else k(-1,"No Transport");function k(t,n,r,s){var l,p,d,b,T,C=n;c||(c=!0,u&&e.clearTimeout(u),i=void 
0,a=s||"",E.readyState=t>0?4:0,l=t>=200&&t<300||304===t,r&&(b=Xt(h,E,r)),b=Ut(h,b,E,l),l?(h.ifModified&&((T=E.getResponseHeader("Last-Modified"))&&(w.lastModified[o]=T),(T=E.getResponseHeader("etag"))&&(w.etag[o]=T)),204===t||"HEAD"===h.type?C="nocontent":304===t?C="notmodified":(C=b.state,p=b.data,l=!(d=b.error))):(d=C,!t&&C||(C="error",t<0&&(t=0))),E.status=t,E.statusText=(n||C)+"",l?v.resolveWith(g,[p,C,E]):v.rejectWith(g,[E,C,d]),E.statusCode(x),x=void 0,f&&y.trigger(l?"ajaxSuccess":"ajaxError",[E,h,l?p:d]),m.fireWith(g,[E,C]),f&&(y.trigger("ajaxComplete",[E,h]),--w.active||w.event.trigger("ajaxStop")))}return E},getJSON:function(e,t,n){return w.get(e,t,n,"json")},getScript:function(e,t){return w.get(e,void 0,t,"script")}}),w.each(["get","post"],function(e,t){w[t]=function(e,n,r,i){return g(n)&&(i=i||r,r=n,n=void 0),w.ajax(w.extend({url:e,type:t,dataType:i,data:n,success:r},w.isPlainObject(e)&&e))}}),w._evalUrl=function(e){return w.ajax({url:e,type:"GET",dataType:"script",cache:!0,async:!1,global:!1,"throws":!0})},w.fn.extend({wrapAll:function(e){var t;return this[0]&&(g(e)&&(e=e.call(this[0])),t=w(e,this[0].ownerDocument).eq(0).clone(!0),this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstElementChild)e=e.firstElementChild;return e}).append(this)),this},wrapInner:function(e){return g(e)?this.each(function(t){w(this).wrapInner(e.call(this,t))}):this.each(function(){var t=w(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=g(e);return this.each(function(n){w(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(e){return this.parent(e).not("body").each(function(){w(this).replaceWith(this.childNodes)}),this}}),w.expr.pseudos.hidden=function(e){return!w.expr.pseudos.visible(e)},w.expr.pseudos.visible=function(e){return!!(e.offsetWidth||e.offsetHeight||e.getClientRects().length)},w.ajaxSettings.xhr=function(){try{return new e.XMLHttpRequest}catch(e){}};var Vt={0:200,1223:204},Gt=w.ajaxSettings.xhr();h.cors=!!Gt&&"withCredentials"in Gt,h.ajax=Gt=!!Gt,w.ajaxTransport(function(t){var n,r;if(h.cors||Gt&&!t.crossDomain)return{send:function(i,o){var a,s=t.xhr();if(s.open(t.type,t.url,t.async,t.username,t.password),t.xhrFields)for(a in t.xhrFields)s[a]=t.xhrFields[a];t.mimeType&&s.overrideMimeType&&s.overrideMimeType(t.mimeType),t.crossDomain||i["X-Requested-With"]||(i["X-Requested-With"]="XMLHttpRequest");for(a in i)s.setRequestHeader(a,i[a]);n=function(e){return function(){n&&(n=r=s.onload=s.onerror=s.onabort=s.ontimeout=s.onreadystatechange=null,"abort"===e?s.abort():"error"===e?"number"!=typeof s.status?o(0,"error"):o(s.status,s.statusText):o(Vt[s.status]||s.status,s.statusText,"text"!==(s.responseType||"text")||"string"!=typeof s.responseText?{binary:s.response}:{text:s.responseText},s.getAllResponseHeaders()))}},s.onload=n(),r=s.onerror=s.ontimeout=n("error"),void 0!==s.onabort?s.onabort=r:s.onreadystatechange=function(){4===s.readyState&&e.setTimeout(function(){n&&r()})},n=n("abort");try{s.send(t.hasContent&&t.data||null)}catch(e){if(n)throw e}},abort:function(){n&&n()}}}),w.ajaxPrefilter(function(e){e.crossDomain&&(e.contents.script=!1)}),w.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/\b(?:java|ecma)script\b/},converters:{"text script":function(e){return w.globalEval(e),e}}}),w.ajaxPrefilter("script",function(e){void 0===e.cache&&(e.cache=!1),e.crossDomain&&(e.type="GET")}),w.ajaxTransport("script",function(e){if(e.crossDomain){var 
t,n;return{send:function(i,o){t=w("