diff --git a/.forgejo/workflows/build-on-commit.yml b/.forgejo/workflows/build-on-commit.yml deleted file mode 100644 index e8f0d2e3..00000000 --- a/.forgejo/workflows/build-on-commit.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Build Docker Image on Commit - -on: - push: - branches: - - main - tags: - - '!' # Exclude tags - -jobs: - build-and-publish: - runs-on: docker-builder - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set REPO_VARS - id: repo-url - run: | - echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV - echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV - - - name: Login to OCI registry - run: | - echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin - - - name: Build and push Docker images - run: | - # Build Docker image with commit SHA - docker build -t $REPO_HOST/$REPO_PATH:${{ github.sha }} . - docker push $REPO_HOST/$REPO_PATH:${{ github.sha }} - - # Build Docker image with nightly tag - docker tag $REPO_HOST/$REPO_PATH:${{ github.sha }} $REPO_HOST/$REPO_PATH:nightly - docker push $REPO_HOST/$REPO_PATH:nightly - - # Remove local images to save storage - docker rmi $REPO_HOST/$REPO_PATH:${{ github.sha }} - docker rmi $REPO_HOST/$REPO_PATH:nightly diff --git a/.forgejo/workflows/build-on-tag.yml b/.forgejo/workflows/build-on-tag.yml deleted file mode 100644 index 888102b6..00000000 --- a/.forgejo/workflows/build-on-tag.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Build and Publish Docker Image on Tag - -on: - push: - tags: - - '*' - -jobs: - build-and-publish: - runs-on: docker-builder - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set REPO_VARS - id: repo-url - run: | - echo "REPO_HOST=$(echo "${{ github.server_url }}" | sed 's~http[s]*://~~g')" >> $GITHUB_ENV - echo "REPO_PATH=${{ github.repository }}" >> $GITHUB_ENV - - - name: Login to OCI registry - run: | - echo "${{ secrets.OCI_TOKEN }}" | docker login $REPO_HOST -u "${{ secrets.OCI_USER }}" --password-stdin - - - name: Build and push Docker image - run: | - TAG=${{ github.ref_name }} # Get the tag name from the context - # Build and push multi-platform Docker images - docker build -t $REPO_HOST/$REPO_PATH:$TAG --push . 
- # Tag and push latest - docker tag $REPO_HOST/$REPO_PATH:$TAG $REPO_HOST/$REPO_PATH:latest - docker push $REPO_HOST/$REPO_PATH:latest - - # Remove the local image to save storage - docker rmi $REPO_HOST/$REPO_PATH:$TAG - docker rmi $REPO_HOST/$REPO_PATH:latest \ No newline at end of file diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml deleted file mode 100644 index aab991d5..00000000 --- a/.github/FUNDING.yml +++ /dev/null @@ -1,10 +0,0 @@ -github: canewsin -patreon: # Replace with a single Patreon username e.g., user1 -open_collective: # Replace with a single Open Collective username e.g., user1 -ko_fi: canewsin -tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel -community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry -liberapay: canewsin -issuehunt: # Replace with a single IssueHunt username e.g., user1 -otechie: # Replace with a single Otechie username e.g., user1 -custom: ['https://paypal.me/PramUkesh', 'https://zerolink.ml/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/'] diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md deleted file mode 100644 index b97ad556..00000000 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve ZeroNet -title: '' -labels: '' -assignees: '' - ---- - -### Step 1: Please describe your environment - - * ZeroNet version: _____ - * Operating system: _____ - * Web browser: _____ - * Tor status: not available/always/disabled - * Opened port: yes/no - * Special configuration: ____ - -### Step 2: Describe the problem: - -#### Steps to reproduce: - - 1. _____ - 2. _____ - 3. _____ - -#### Observed Results: - - * What happened? This could be a screenshot, a description, log output (you can send log/debug.log file to hello@zeronet.io if necessary), etc. - -#### Expected Results: - - * What did you expect to happen? diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index fe7c8178..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for ZeroNet -title: '' -labels: '' -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 27b5c924..00000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,72 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. -# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. 
-# -name: "CodeQL" - -on: - push: - branches: [ py3-latest ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ py3-latest ] - schedule: - - cron: '32 19 * * 2' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'javascript', 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - - # ℹ️ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
- - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml deleted file mode 100644 index 2bdcaf95..00000000 --- a/.github/workflows/tests.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: tests - -on: [push, pull_request] - -jobs: - test: - runs-on: ubuntu-20.04 - strategy: - max-parallel: 16 - matrix: - python-version: ["3.7", "3.8", "3.9"] - - steps: - - name: Checkout ZeroNet - uses: actions/checkout@v2 - with: - submodules: "true" - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Prepare for installation - run: | - python3 -m pip install setuptools - python3 -m pip install --upgrade pip wheel - python3 -m pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - - - name: Install - run: | - python3 -m pip install --upgrade -r requirements.txt - python3 -m pip list - - - name: Prepare for tests - run: | - openssl version -a - echo 0 | sudo tee /proc/sys/net/ipv6/conf/all/disable_ipv6 - - - name: Test - run: | - catchsegv python3 -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini - export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python3 -m pytest -x plugins/CryptMessage/Test - export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python3 -m pytest -x plugins/Bigfile/Test - export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python3 -m pytest -x plugins/AnnounceLocal/Test - export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python3 -m pytest -x plugins/OptionalManager/Test - export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test - export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test - find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - flake8 . 
--count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ diff --git a/.gitignore b/.gitignore index 636cd115..1b4c0123 100644 --- a/.gitignore +++ b/.gitignore @@ -7,14 +7,9 @@ __pycache__/ # Hidden files .* -!/.forgejo -!/.github !/.gitignore !/.travis.yml -!/.gitlab-ci.yml -# Temporary files -*.bak # Data dir data/* @@ -23,14 +18,13 @@ data/* # Virtualenv env/* -# Tor data +# Tor, downloaded automatically tools/tor/data +tools/tor/*exe +tools/tor/*dll # PhantomJS, downloaded manually for unit tests tools/phantomjs # ZeroNet config file zeronet.conf - -# ZeroNet log files -log/* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index f3e1ed29..00000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,48 +0,0 @@ -stages: - - test - -.test_template: &test_template - stage: test - before_script: - - pip install --upgrade pip wheel - # Selenium and requests can't be installed without a requests hint on Python 3.4 - - pip install --upgrade requests>=2.22.0 - - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - - pip install --upgrade -r requirements.txt - script: - - pip list - - openssl version -a - - python -m pytest -x plugins/CryptMessage/Test --color=yes - - python -m pytest -x plugins/Bigfile/Test --color=yes - - python -m pytest -x plugins/AnnounceLocal/Test --color=yes - - python -m pytest -x plugins/OptionalManager/Test --color=yes - - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini --color=yes - - mv plugins/disabled-Multiuser plugins/Multiuser - - python -m pytest -x plugins/Multiuser/Test --color=yes - - mv plugins/disabled-Bootstrapper plugins/Bootstrapper - - python -m pytest -x plugins/Bootstrapper/Test --color=yes - - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ - -test:py3.4: - image: python:3.4.3 - <<: *test_template - -test:py3.5: - image: python:3.5.7 - <<: *test_template - -test:py3.6: - image: python:3.6.9 - <<: *test_template - -test:py3.7-openssl1.1.0: - image: python:3.7.0b5 - <<: *test_template - -test:py3.7-openssl1.1.1: - image: python:3.7.4 - <<: *test_template - -test:py3.8: - image: python:3.8.0b3 - <<: *test_template \ No newline at end of file diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 2c602a5a..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "plugins"] - path = plugins - url = https://github.com/ZeroNetX/ZeroNet-Plugins.git diff --git a/.travis.yml b/.travis.yml index bdaafa22..34f7706d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,47 +1,25 @@ language: python python: - - 3.4 - - 3.5 - - 3.6 - - 3.7 - - 3.8 + - 2.7 services: - docker -cache: pip -before_install: - - pip install --upgrade pip wheel - - pip install --upgrade codecov coveralls flake8 mock pytest==4.6.3 pytest-cov selenium - # - docker build -t zeronet . 
- # - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet install: - - pip install --upgrade -r requirements.txt - - pip list + - pip install -U pip wheel + - pip install -r requirements.txt before_script: - openssl version -a - # Add an IPv6 config - see the corresponding Travis issue - # https://github.com/travis-ci/travis-ci/issues/8361 - - if [ "${TRAVIS_OS_NAME}" == "linux" ]; then - sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6'; - fi script: - - catchsegv python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini - - export ZERONET_LOG_DIR="log/CryptMessage"; catchsegv python -m pytest -x plugins/CryptMessage/Test - - export ZERONET_LOG_DIR="log/Bigfile"; catchsegv python -m pytest -x plugins/Bigfile/Test - - export ZERONET_LOG_DIR="log/AnnounceLocal"; catchsegv python -m pytest -x plugins/AnnounceLocal/Test - - export ZERONET_LOG_DIR="log/OptionalManager"; catchsegv python -m pytest -x plugins/OptionalManager/Test - - export ZERONET_LOG_DIR="log/Multiuser"; mv plugins/disabled-Multiuser plugins/Multiuser && catchsegv python -m pytest -x plugins/Multiuser/Test - - export ZERONET_LOG_DIR="log/Bootstrapper"; mv plugins/disabled-Bootstrapper plugins/Bootstrapper && catchsegv python -m pytest -x plugins/Bootstrapper/Test - - find src -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - - find plugins -name "*.json" | xargs -n 1 python3 -c "import json, sys; print(sys.argv[1], end=' '); json.load(open(sys.argv[1])); print('[OK]')" - - flake8 . --count --select=E9,F63,F72,F82 --show-source --statistics --exclude=src/lib/pyaes/ -after_failure: - - zip -r log.zip log/ - - curl --upload-file ./log.zip https://transfer.sh/log.zip + - python -m pytest plugins/CryptMessage/Test + - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini +before_install: + - pip install -U pytest mock pytest-cov selenium + - pip install codecov + - pip install coveralls + - docker build -t zeronet . + - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet after_success: - codecov - coveralls --rcfile=src/Test/coverage.ini -notifications: - email: - recipients: - hello@zeronet.io - on_success: change +cache: + directories: + - $HOME/.cache/pip diff --git a/CHANGELOG.md b/CHANGELOG.md index 6974d18a..6955f642 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,469 +1,3 @@ -### ZeroNet 0.9.0 (2023-07-12) Rev4630 - - Fix RDos Issue in Plugins https://github.com/ZeroNetX/ZeroNet-Plugins/pull/9 - - Add trackers to Config.py as a failsafe in case trackers.txt is missing - - Added Proxy links - - Fix pysha3 dep installation issue - - FileRequest -> Remove Unnecessary check, Fix error wording - - Fix Response when site is missing for `actionAs` - - -### ZeroNet 0.8.5 (2023-02-12) Rev4625 - - Fix (https://github.com/ZeroNetX/ZeroNet/pull/202) for SSL cert generation failure on Windows. - - Default theme-class for missing value in `users.json`. - - Fetch Stats Plugin changes. - -### ZeroNet 0.8.4 (2022-12-12) Rev4620 - - Increase Minimum Site size to 25MB. 
- -### ZeroNet 0.8.3 (2022-12-11) Rev4611 - - main.py -> Fix accessing unassigned variable - - ContentManager -> Support for multiSig - - SiteStorage.py -> Fix accessing unassigned variable - - ContentManager.py Improve Logging of Valid Signers - -### ZeroNet 0.8.2 (2022-11-01) Rev4610 - - Fix Startup Error when plugins dir missing - - Move trackers to separate file & Add more trackers - - Config:: Skip loading missing tracker files - - Added documentation for getRandomPort fn - -### ZeroNet 0.8.1 (2022-10-01) Rev4600 - - fix readdress loop (cherry-pick previously added commit from conservancy) - - Remove Patreon badge - - Update README-ru.md (#177) - - Include inner_path of failed request for signing in error msg and response - - Don't Fail Silently When Cert is Not Selected - - Console Log Updates, Specify min supported ZeroNet version for Rust version Protocol Compatibility - - Update FUNDING.yml - -### ZeroNet 0.8.0 (2022-05-27) Rev4591 - - Revert File Open to catch File Access Errors. - -### ZeroNet 0.7.9-patch (2022-05-26) Rev4586 - - Use xescape(s) from zeronet-conservancy - - actionUpdate response Optimisation - - Fetch Plugins Repo Updates - - Fix Unhandled File Access Errors - - Create codeql-analysis.yml - -### ZeroNet 0.7.9 (2022-05-26) Rev4585 - - Rust Version Compatibility for update Protocol msg - - Removed Non Working Trackers. - - Dynamically Load Trackers from Dashboard Site. - - Tracker Supply Improvements. - - Fix Repo Url for Bug Report - - First Party Tracker Update Service using Dashboard Site. - - remove old v2 onion service [#158](https://github.com/ZeroNetX/ZeroNet/pull/158) - -### ZeroNet 0.7.8 (2022-03-02) Rev4580 - - Update Plugins with some bug fixes and Improvements - -### ZeroNet 0.7.6 (2022-01-12) Rev4565 - - Sync Plugin Updates - - Clean up tor v3 patch [#115](https://github.com/ZeroNetX/ZeroNet/pull/115) - - Add More Default Plugins to Repo - - Doubled Site Publish Limits - - Update ZeroNet Repo Urls [#103](https://github.com/ZeroNetX/ZeroNet/pull/103) - - UI/UX: Increases Size of Notifications Close Button [#106](https://github.com/ZeroNetX/ZeroNet/pull/106) - - Moved Plugins to Separate Repo - - Added `access_key` variable in Config; this is used to access restricted plugins when the Multiuser plugin is enabled. When the Multiuser plugin is enabled we cannot access some pages like /Stats; this key removes such restrictions. - - Added `last_connection_id_current_version` to ConnectionServer, helpful to estimate the number of connections from the current client version. - - Added current version connection count to the /Stats page (see the previous point). - -### ZeroNet 0.7.5 (2021-11-28) Rev4560 - - Add more default trackers - - Change default homepage address to `1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d` - - Change default update site address to `1Update8crprmciJHwp2WXqkx2c4iYp18` - -### ZeroNet 0.7.3 (2021-11-28) Rev4555 - - Fix xrange is undefined error - - Fix Incorrect viewport on mobile while loading - - Tor-V3 Patch by anonymoose - - -### ZeroNet 0.7.1 (2019-07-01) Rev4206 -### Added - - Built-in logging console in the web UI to see what's happening in the background. (pull down the top-right 0 button to see it) - - Display database rebuild errors [Thanks to Lola] - - New plugin system that allows installing and managing built-in/third-party extensions to the ZeroNet client using the web interface. 
- - Support multiple trackers_file - - Add OpenSSL 1.1 support to CryptMessage plugin based on Bitmessage modifications [Thanks to radfish] - - Display visual error message on startup errors - - Fix max opened files changing on Windows platform - - Display TLS1.3 compatibility on /Stats page - - Add fake SNI and ALPN to peer connections to make them more like standard https connections - - Hide and ignore tracker_proxy setting in Tor: Always mode as it's going to use Tor anyway. - - Deny websocket connections from unknown origins - - Restrict open_browser values to avoid RCE on sandbox escape - - Offer access to the web interface by IP address in case of unknown host - - Link to site's sidebar with "#ZeroNet:OpenSidebar" hash - -### Changed - - Allow .. in file names [Thanks to imachug] - - Change unstable trackers - - Cleaner errors on sites.json/users.json load error - - Various tweaks for tracker rating on unstable connections - - Use OpenSSL 1.1 dlls from default Python Windows distribution if possible - - Re-factor domain resolving for easier domain plugins - - Disable UDP connections if --proxy is used - - New, decorator-based Websocket API permission system to avoid future typo mistakes - -### Fixed - - Fix parsing config lines that have no value - - Fix start.py [Thanks to imachug] - - Allow multiple values of the same key in the config file [Thanks ssdifnskdjfnsdjk for reporting] - - Fix parsing config file lines that have % in the value [Thanks slrslr for reporting] - - Fix bootstrapper plugin hash reloads [Thanks geekless for reporting] - - Fix CryptMessage plugin OpenSSL dll loading on Windows (ZeroMail errors) [Thanks cxgreat2014 for reporting] - - Fix startup error when using OpenSSL 1.1 [Thanks to imachug] - - Fix a bug where merged site data was not loaded for 5 sec after the merged site was added - - Fix typo that allowed adding new plugins in public proxy mode. [Thanks styromaniac for reporting] - - Fix loading non-big files with "|all" postfix [Thanks to krzotr] - - Fix OpenSSL cert generation error crash by changing Windows console encoding to utf8 - -#### Wrapper html injection vulnerability [Reported by ivanq] - -In ZeroNet before rev4188 the wrapper template variables were rendered incorrectly. - -Result: The opened site was able to gain a WebSocket connection with unrestricted ADMIN/NOSANDBOX access, change configuration values, and possibly achieve RCE on the client's machine. - -Fix: Fixed the template rendering code, disallowed WebSocket connections from unknown locations, restricted open_browser configuration values to avoid possible RCE in case of sandbox escape. - -Note: The fix is also backported to the ZeroNet Py 2.x version (Rev3870) - - -### ZeroNet 0.7.0 (2019-06-12) Rev4106 (First release targeting Python 3.4+) -### Added - - 5-10x faster signature verification by using libsecp256k1 (Thanks to ZeroMux) - - Generated SSL certificate randomization to avoid protocol filters (Thanks to ValdikSS) - - Offline mode - - P2P source code update using ZeroNet protocol - - ecdsaSign/Verify commands to CryptMessage plugin (Thanks to imachug) - - Efficient file rename: change file names instead of re-downloading the file. 
- - Make redirect optional on site cloning (Thanks to Lola) - - EccPrivToPub / EccPubToPriv functions (Thanks to imachug) - - Detect and change dark/light theme based on OS setting (Thanks to filips123) - -### Changed - - Re-factored code to Python3 runtime (compatible with Python 3.4-3.8) - - Safer database sync mode - - Removed bundled third-party libraries where possible - - Use lang=en instead of lang={lang} in urls to avoid url encode problems - - Remove environment details from error page - - Don't push content.json updates larger than 10kb to significantly reduce bw usage for sites with many files - -### Fixed - - Fix sending files with \0 characters - - Security fix: Escape error detail to avoid XSS (reported by krzotr) - - Fix signature verification using libsecp256k1 for compressed addresses (mostly certificates generated in the browser) - - Fix newsfeed if you have more than 1000 followed topics/posts on one site. - - Fix site download as zip file - - Fix displaying sites with utf8 title - - Error message if dbRebuild fails (Thanks to Lola) - - Fix browser reopen when executing start.py again. (Thanks to imachug) - - -### ZeroNet 0.6.5 (2019-02-16) Rev3851 (Last release targeting Python 2.7.x) -### Added - - IPv6 support in peer exchange, bigfiles, optional file finding, tracker sharing, socket listening and connecting (based on tangdou1 modifications) - - New tracker database format with IPv6 support - - Display notification if there is an unpublished modification for your site - - Listen and shut down normally on SIGTERM (Thanks to blurHY) - - Support tilde `~` in filenames (by d14na) - - Support map for Namecoin subdomain names (Thanks to lola) - - Add log level to config page - - Support `{data}` for data dir variable in trackers_file value - - Quick check content.db on startup and rebuild if necessary - - Don't show meek proxy option if the tor client does not support it - -### Changed - - Refactored port open checking with IPv6 support - - Consider non-local IPs as external even if the open port check fails (for CJDNS and Yggdrasil support) - - Add IPv6 tracker and change unstable tracker - - Don't correct sent local time with the calculated time correction - - Disable CSP for Edge - - Only support CREATE commands in dbschema indexes node and SELECT from storage.query - -### Fixed - - Check the length of master seed when executing cryptGetPrivatekey CLI command - - Only reload source code on file modification / creation - - Detect and issue a warning for the latest no-script plugin - - Fix atomic write of a non-existent file - - Fix sql queries with lots of variables and sites with lots of content.json - - Fix multi-line parsing of zeronet.conf - - Fix site deletion from users.json - - Fix site cloning before site is downloaded (Reported by unsystemizer) - - Fix queryJson for non-list nodes (Reported by MingchenZhang) - - -## ZeroNet 0.6.4 (2018-10-20) Rev3660 -### Added - - New plugin: UiConfig. A web interface that allows changing ZeroNet settings. - - New plugin: AnnounceShare. Share trackers between users, automatically announce client's ip as tracker if Bootstrapper plugin is enabled. - - Global tracker stats on ZeroHello: Include statistics from all served sites instead of displaying request statistics only for one site. - - Support custom proxy for trackers. (Configurable with /Config) - - Adding peers to sites manually using zeronet_peers get parameter - - Copy site address with peers link on the sidebar. - - Zip file listing and streaming support for Bigfiles. 
- - Tracker statistics on /Stats page - - Peer reputation save/restore to speed up sync time after startup. - - Full support for fileGet, fileList, dirList calls on tar.gz/zip files. - - Archived_before support in user content rules to allow deletion of all user files before the specified date - - Show and manage "Connecting" sites on ZeroHello - - Add theme support to ZeroNet sites - - Dark theme for ZeroHello, ZeroBlog, ZeroTalk - -### Changed - - Dynamic big file allocation: More efficient storage usage by not pre-allocating the whole file at the beginning, but expanding the size as the content downloads. - - Reduce the request frequency to unreliable trackers. - - Only allow 5 concurrent checkSites to run in parallel to reduce load under Tor/slow connection. - - Stop site downloading if it reached 95% of site limit to avoid download loop for sites out of limit - - The pinned optional files won't be removed from the download queue after 30 retries and won't be deleted even if the site owner removes them. - - Don't remove incomplete (downloading) sites on startup - - Remove --pin_bigfile argument as big files are automatically excluded from optional files limit. - -### Fixed - - Trayicon compatibility with latest gevent - - Request number counting for zero:// trackers - - Peer reputation boost for zero:// trackers. - - Blocklist of peers loaded from peerdb (Thanks tangdou1 for report) - - Sidebar map loading on foreign languages (Thx tangdou1 for report) - - FileGet on non-existent files (Thanks mcdev for reporting) - - Peer connecting bug for sites with a low number of peers - -#### "The Vacation" Sandbox escape bug [Reported by GitCenter / Krixano / ZeroLSTN] - -In ZeroNet 0.6.3 Rev3615 and earlier, as a result of invalid file type detection, a malicious site could escape the iframe sandbox. - -Result: Browser iframe sandbox escape - -Applied fix: Replaced the previous, file extension based file type identification with a proper one. - -Affected versions: All versions before ZeroNet Rev3616 - - -## ZeroNet 0.6.3 (2018-06-26) -### Added - - New plugin: ContentFilter that allows having shared site and user block lists. - - Support Tor meek proxies to avoid tracker blocking by the GFW - - Detect network level tracker blocking and make it easy to set a meek proxy for tracker connections. 
- - Support downloading 2GB+ sites as .zip (Thx to Radtoo) - - Support ZeroNet as a transparent proxy (Thx to JeremyRand) - - Allow fileQuery as CORS command (Thx to imachug) - - Windows distribution includes Tor and meek client by default - - Download sites as zip link to sidebar - - File server port randomization - - Implicit SSL for all connections - - fileList API command for zip files - - Auto download bigfiles size limit on sidebar - - Local peer number to the sidebar - - Open site directory button in sidebar - -### Changed - - Switched to Azure Tor meek proxy as the Amazon one became unavailable - - Refactored/rewritten tracker connection manager - - Improved peer discovery for optional files without opened port - - Also delete Bigfile's piecemap on deletion - -### Fixed - - Important security issue: Iframe sandbox escape [Reported by Ivanq / gitcenter] - - Local peer discovery when running multiple clients on the same machine - - Uploading small files with Bigfile plugin - - Ctrl-c shutdown when running CLI commands - - High CPU/IO usage when Multiuser plugin enabled - - Firefox back button - - Peer discovery on older Linux kernels - - Optional file handling when multiple files have the same hash_id (first 4 chars of the hash) - - Msgpack 0.5.5 and 0.5.6 compatibility - -## ZeroNet 0.6.2 (2018-02-18) - -### Added - - New plugin: AnnounceLocal to make ZeroNet work without an internet connection on the local network. - - Allow dbQuery and userGetSettings using the `as` API command on different sites with Cors permission - - New config option: `--log_level` to reduce log verbosity and IO load - - Prefer to connect to recent peers from trackers first - - Mark peers with port 1 as unconnectable too, a future fix for trackers that do not support port 0 announce - -### Changed - - Don't keep connection for sites that have not been modified in the last week - - Change unreliable trackers to new ones - - Send maximum 10 findhash requests in one find optional files round (15sec) - - Change "Unique to site" to "No certificate" for default option in cert selection dialog. - - Don't print warnings if not in debug mode - - Generalized tracker logging format - - Only recover sites from sites.json if they had peers - - A message from local peers does not mean an internet connection - - Removed `--debug_gevent` and turned on Gevent block logging by default - -### Fixed - - Limit connections to 512 to avoid reaching the 1024 limit on Windows - - Exception when logging foreign operating system socket errors - - Don't send private (local) IPs on pex - - Don't connect to private IPs in Tor always mode - - Properly recover data from msgpack unpacker on file stream start - - Symlinked data directory deletion when deleting site using Windows - - De-duplicate peers before publishing - - Bigfile info for non-existing files - - -## ZeroNet 0.6.1 (2018-01-25) - -### Added - - New plugin: Chart - - Collect and display charts about your contribution to the ZeroNet network - - Allow list as argument replacement in sql queries. 
(Thanks to imachug) - - Newsfeed query time statistics (Click on "From XX sites in X.Xs" on ZeroHello) - - New UiWebsocket API command: `as` to run commands as another site - - Ranged ajax queries for big files - - Filter feed by type and site address - - FileNeed, Bigfile upload command compatibility with merger sites - - Send event on port open / tor status change - - More description on permission request - -### Changed - - Reduce memory usage of sidebar geoip database cache - - Change unreliable tracker to new one - - Don't display Cors permission prompt if it is already granted - - Avoid UI blocking when rebuilding a merger site - - Skip listing ignored directories on signing - - In Multiuser mode show the seed welcome message when adding a new certificate instead of on first visit - - Faster async port opening on multiple network interfaces - - Allow javascript modals - - Only zoom sidebar globe if mouse button is pressed down - -### Fixed - - Open port checking error reporting (Thanks to imachug) - - Out-of-range big file requests - - Don't output errors that happened on gevent greenlets twice - - Newsfeed: skip sites with no database - - Newsfeed queries with multiple params - - Newsfeed queries with UNION and UNION ALL - - Fix site clone with sites larger than 10MB - - Unreliable Websocket connection when requesting files from different sites at the same time - - -## ZeroNet 0.6.0 (2017-10-17) - -### Added - - New plugin: Big file support - - Automatic pinning on Big file download - - Enable TCP_NODELAY for supporting sockets - - actionOptionalFileList API command arguments to list non-downloaded files or only big files - - serverShowdirectory API command arguments to allow displaying the site's directory in the OS file browser - - fileNeed API command to initialize optional file downloading - - wrapperGetAjaxKey API command to request a nonce for AJAX requests - - Json.gz support for database files - - P2P port checking (Thanks to grez911) - - `--download_optional auto` argument to enable automatic optional file downloading for newly added sites - - Statistics for big files and protocol command requests on /Stats - - Allow setting user limitations based on auth_address - -### Changed - - More aggressive and frequent connection timeout checking - - Use out-of-msgpack-context file streaming for files larger than 512KB - - Allow optional files workers over the worker limit - - Automatic redirection to wrapper on nonce_error - - Send websocket event on optional file deletion - - Optimize sites.json saving - - Enable faster C-based msgpack packer by default - - Major optimization on Bootstrapper plugin SQL queries - - Don't reset bad file counter on restart, to allow giving up more easily on unreachable files - - Incoming connection limit changed from 1000 to 500 to avoid reaching the socket limit on Windows - - Changed tracker boot.zeronet.io domain, because zeronet.io got banned in some countries - -#### Fixed - - Sub-directories in user directories - -## ZeroNet 0.5.7 (2017-07-19) -### Added - - New plugin: CORS to request read permission to another site's content - - New API command: userSetSettings/userGetSettings to store site settings in users.json - - Avoid file download if the file size does not match the requested one - - JavaScript- and wrapper-less file access using the /raw/ prefix ([Example](http://127.0.0.1:43110/raw/1AsRLpuRxr3pb9p3TKoMXPSWHzh6i7fMGi/en.tar.gz/index.html)) - - --silent command line option to disable logging to stdout - - -### Changed - - Better error reporting on sign/verification errors - - More
tests for the sign and verification process - - Update to OpenSSL v1.0.2l - - Limit compressed files to 6MB to avoid zip/tar.gz bombs - - Allow space, [], () characters in filenames - - Disable cross-site resource loading to improve privacy. [Reported by Beardog108] - - Download directly accessed Pdf/Svg/Swf files instead of displaying them, to avoid wrapper escape using JS in SVG files. [Reported by Beardog108] - - Disallow potentially unsafe regular expressions to avoid ReDoS [Reported by MuxZeroNet] - -### Fixed - - Detecting data directory when running Windows distribution exe [Reported by Plasmmer] - - OpenSSL loading under Android 6+ - - Error on exiting when no connection server was started - - -## ZeroNet 0.5.6 (2017-06-15) -### Added - - Callback for certSelect API command - - More compact list formatting in json - -### Changed - - Remove obsolete auth_key_sha512 and signature format - - Improved Spanish translation (Thanks to Pupiloho) - -### Fixed - - Opened port checking (Thanks l5h5t7 & saber28 for reporting) - - Standalone update.py argument parsing (Thanks Zalex for reporting) - - UPnP crash on startup (Thanks Vertux for reporting) - - CoffeeScript 1.12.6 compatibility (Thanks kavamaken & imachug) - - Multi-value argument parsing - - Database error when running from a directory that contains special characters (Thanks Pupiloho for reporting) - - Site lock violation logging - - -#### Proxy bypass during source upgrade [Reported by ZeroMux] - -In ZeroNet before 0.5.6, during the client's built-in source code upgrade mechanism, -ZeroNet did not respect Tor and/or proxy settings. - -Result: ZeroNet downloaded the update without using the Tor network and potentially leaked the connections. - -Fix: Removed the problematic code line from the updater that removed the proxy settings from the socket library. - -Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6 - - -#### XSS vulnerability using DNS rebinding. [Reported by Beardog108] - -In ZeroNet before 0.5.6 the web interface did not validate the request's Host parameter. - -Result: An attacker using a specially crafted DNS entry could have bypassed the browser's cross-site-scripting protection -and potentially gained access to the user's private data stored on the site. - -Fix: By default ZeroNet only accepts connections from 127.0.0.1 and localhost hosts. -If you bind the ui server to an external interface, it also adds the first http request's host to the allowed host list, -or you can define it manually using --ui_host. - -Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6 - - -## ZeroNet 0.5.5 (2017-05-18) -### Added -- Outgoing socket binding by --bind parameter -- Database rebuilding progress bar -- Protect low-traffic sites' peers from cleanup closing -- Local site blacklisting -- Cloned site source code upgrade from parent -- Input placeholder support for displayPrompt -- Alternative interaction for wrapperConfirm - -### Changed -- New file priorities for faster site display on first visit -- Don't add ? 
to url if push/replaceState url starts with # - -### Fixed -- PermissionAdd/Remove admin command requirement -- Multi-line confirmation dialog - - ## ZeroNet 0.5.4 (2017-04-14) ### Added - Major speed and CPU usage enhancements in Tor always mode diff --git a/Dockerfile b/Dockerfile index 3f1d3c18..350fff9f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,33 +1,32 @@ -FROM alpine:3.15 +FROM ubuntu:16.04 + +MAINTAINER Felix Imobersteg #Base settings +ENV DEBIAN_FRONTEND noninteractive ENV HOME /root -COPY requirements.txt /root/requirements.txt - #Install ZeroNet -RUN apk --update --no-cache --no-progress add python3 python3-dev py3-pip gcc g++ autoconf automake libtool libffi-dev musl-dev make tor openssl \ - && pip3 install -r /root/requirements.txt \ - && apk del python3-dev gcc g++ autoconf automake libtool libffi-dev musl-dev make \ - && echo "ControlPort 9051" >> /etc/tor/torrc \ - && echo "CookieAuthentication 1" >> /etc/tor/torrc - -RUN python3 -V \ - && python3 -m pip list \ - && tor --version \ - && openssl version +RUN \ + apt-get update -y; \ + apt-get -y install msgpack-python python-gevent python-pip python-dev tor; \ + pip install msgpack-python --upgrade; \ + apt-get clean -y; \ + rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*; \ + echo "ControlPort 9051" >> /etc/tor/torrc; \ + echo "CookieAuthentication 1" >> /etc/tor/torrc + #Add Zeronet source -COPY . /root +ADD . /root VOLUME /root/data #Control if Tor proxy is started -ENV ENABLE_TOR true - -WORKDIR /root +ENV ENABLE_TOR false #Set upstart command -CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26117 +CMD cd /root && (! ${ENABLE_TOR} || /etc/init.d/tor start) && python zeronet.py --ui_ip 0.0.0.0 #Expose ports -EXPOSE 43110 26117 +EXPOSE 43110 +EXPOSE 15441 diff --git a/Dockerfile.arm64v8 b/Dockerfile.arm64v8 deleted file mode 100644 index d27b7620..00000000 --- a/Dockerfile.arm64v8 +++ /dev/null @@ -1,34 +0,0 @@ -FROM alpine:3.12 - -#Base settings -ENV HOME /root - -COPY requirements.txt /root/requirements.txt - -#Install ZeroNet -RUN apk --update --no-cache --no-progress add python3 python3-dev gcc libffi-dev musl-dev make tor openssl \ - && pip3 install -r /root/requirements.txt \ - && apk del python3-dev gcc libffi-dev musl-dev make \ - && echo "ControlPort 9051" >> /etc/tor/torrc \ - && echo "CookieAuthentication 1" >> /etc/tor/torrc - -RUN python3 -V \ - && python3 -m pip list \ - && tor --version \ - && openssl version - -#Add Zeronet source -COPY . /root -VOLUME /root/data - -#Control if Tor proxy is started -ENV ENABLE_TOR false - -WORKDIR /root - -#Set upstart command -CMD (! ${ENABLE_TOR} || tor&) && python3 zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552 - -#Expose ports -EXPOSE 43110 26552 - diff --git a/LICENSE b/LICENSE index 0d17b72d..d6a93266 100644 --- a/LICENSE +++ b/LICENSE @@ -1,27 +1,340 @@ -This program is free software: you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation, version 3. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program. If not, see <https://www.gnu.org/licenses/>. 
- - -Additional Conditions: - -Contributing to this repo - This repo is governed by GPLv3, which is located at the root of the ZeroNet git repo; - unless specified separately, all code is governed by that license. Contributions to this repo - are divided into two key types: key contributions and non-key contributions. Key contributions - are those that directly affect the performance, quality, and features of the software; - non-key contributions include things like translation datasets and image, graphic, or video - contributions that do not affect the main usability of the software but improve the existing - usability of a certain thing or feature. These also include tests written with code, since their - purpose is to check whether something is working as intended. All the non-key contributions - are governed by [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/). Unless specified - above, a contribution is ruled by its type of contribution if there is a conflict between two - contributing parties of the repo in any case. +GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. 
We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. 
If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. 
+However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. 
Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + {description} + Copyright (C) {year} {fullname} + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. 
+ + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + {signature of Ty Coon}, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. + diff --git a/README-ru.md b/README-ru.md deleted file mode 100644 index 7d557727..00000000 --- a/README-ru.md +++ /dev/null @@ -1,133 +0,0 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) - -[简体中文](./README-zh-cn.md) -[English](./README.md) - -Децентрализованные вебсайты, использующие криптографию Bitcoin и протокол BitTorrent — https://zeronet.dev ([Зеркало в ZeroNet](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/)). В отличии от Bitcoin, ZeroNet'у не требуется блокчейн для работы, однако он использует ту же криптографию, чтобы обеспечить сохранность и проверку данных. - -## Зачем? - -- Мы верим в открытую, свободную, и неподдающуюся цензуре сеть и связь. -- Нет единой точки отказа: Сайт остаётся онлайн, пока его обслуживает хотя бы 1 пир. -- Нет затрат на хостинг: Сайты обслуживаются посетителями. -- Невозможно отключить: Он нигде, потому что он везде. -- Скорость и возможность работать без Интернета: Вы сможете получить доступ к сайту, потому что его копия хранится на вашем компьютере и у ваших пиров. 
- -## Особенности - -- Обновление сайтов в реальном времени -- Поддержка доменов `.bit` ([Namecoin](https://www.namecoin.org)) -- Легкая установка: просто распакуйте и запустите -- Клонирование сайтов "в один клик" -- Беспарольная [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - авторизация: Ваша учетная запись защищена той же криптографией, что и ваш Bitcoin-кошелек -- Встроенный SQL-сервер с синхронизацией данных P2P: Позволяет упростить разработку сайта и ускорить загрузку страницы -- Анонимность: Полная поддержка сети Tor, используя скрытые службы `.onion` вместо адресов IPv4 -- Зашифрованное TLS подключение -- Автоматическое открытие UPnP–порта -- Плагин для поддержки нескольких пользователей (openproxy) -- Работа с любыми браузерами и операционными системами - -## Текущие ограничения - -- Файловые транзакции не сжаты -- Нет приватных сайтов - -## Как это работает? - -- После запуска `zeronet.py` вы сможете посещать сайты в ZeroNet, используя адрес - `http://127.0.0.1:43110/{zeronet_адрес}` - (Например: `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). -- Когда вы посещаете новый сайт в ZeroNet, он пытается найти пиров с помощью протокола BitTorrent, - чтобы скачать у них файлы сайта (HTML, CSS, JS и т.д.). -- После посещения сайта вы тоже становитесь его пиром. -- Каждый сайт содержит файл `content.json`, который содержит SHA512 хеши всех остальные файлы - и подпись, созданную с помощью закрытого ключа сайта. -- Если владелец сайта (тот, кто владеет закрытым ключом для адреса сайта) изменяет сайт, он - подписывает новый `content.json` и публикует его для пиров. После этого пиры проверяют целостность `content.json` - (используя подпись), скачвают изменённые файлы и распространяют новый контент для других пиров. - -[Презентация о криптографии ZeroNet, обновлениях сайтов, многопользовательских сайтах »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -[Часто задаваемые вопросы »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) -[Документация разработчика ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) - -## Скриншоты - -![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -[Больше скриншотов в документации ZeroNet »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) - -## Как присоединиться? - -### Windows - -- Скачайте и распакуйте архив [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26МБ) -- Запустите `ZeroNet.exe` - -### macOS - -- Скачайте и распакуйте архив [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14МБ) -- Запустите `ZeroNet.app` - -### Linux (64 бит) - -- Скачайте и распакуйте архив [ZeroNet-linux.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip) (14МБ) -- Запустите `./ZeroNet.sh` - -> **Note** -> Запустите таким образом: `./ZeroNet.sh --ui_ip '*' --ui_restrict ваш_ip_адрес`, чтобы разрешить удалённое подключение к веб–интерфейсу. 
- -### Docker - -Официальный образ находится здесь: https://hub.docker.com/r/canewsin/zeronet/ - -### Android (arm, arm64, x86) - -- Для работы требуется Android как минимум версии 5.0 Lollipop -- [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) -- Скачать APK: https://github.com/canewsin/zeronet_mobile/releases - -### Android (arm, arm64, x86) Облегчённый клиент только для просмотра (1МБ) - -- Для работы требуется Android как минимум версии 4.1 Jelly Bean -- [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) - -### Установка из исходного кода - -```sh -wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip -unzip ZeroNet-src.zip -cd ZeroNet -sudo apt-get update -sudo apt-get install python3-pip -sudo python3 -m pip install -r requirements.txt -``` -- Запустите `python3 zeronet.py` - -Откройте приветственную страницу ZeroHello в вашем браузере по ссылке http://127.0.0.1:43110/ - -## Как мне создать сайт в ZeroNet? - -- Кликните на **⋮** > **"Create new, empty site"** в меню на сайте [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d). -- Вы будете **перенаправлены** на совершенно новый сайт, который может быть изменён только вами! -- Вы можете найти и изменить контент вашего сайта в каталоге **data/[адрес_вашего_сайта]** -- После изменений откройте ваш сайт, переключите влево кнопку "0" в правом верхнем углу, затем нажмите кнопки **sign** и **publish** внизу - -Следующие шаги: [Документация разработчика ZeroNet](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) - -## Поддержите проект - -- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Рекомендуем) -- LiberaPay: https://liberapay.com/PramUkesh -- Paypal: https://paypal.me/PramUkesh -- Другие способы: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive) - -#### Спасибо! 
- -- Здесь вы можете получить больше информации, помощь, прочитать список изменений и исследовать ZeroNet сайты: https://www.reddit.com/r/zeronetx/ -- Общение происходит на канале [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) или в [Gitter](https://gitter.im/canewsin/ZeroNet) -- Электронная почта: canews.in@gmail.com diff --git a/README-zh-cn.md b/README-zh-cn.md index 37095ff6..7bb1d307 100644 --- a/README-zh-cn.md +++ b/README-zh-cn.md @@ -1,132 +1,183 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.readthedocs.org/en/latest/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/) [English](./README.md) -使用 Bitcoin 加密和 BitTorrent 网络的去中心化网络 - https://zeronet.dev +使用 Bitcoin 加密和 BitTorrent 网络的去中心化网络 - https://zeronet.io -## 为什么? +## 为什么? -* 我们相信开放,自由,无审查的网络和通讯 +* 我们相信开放,自由,无审查的网络 * 不会受单点故障影响:只要有在线的节点,站点就会保持在线 -* 无托管费用:站点由访问者托管 -* 无法关闭:因为节点无处不在 -* 快速并可离线运行:即使没有互联网连接也可以使用 +* 无托管费用: 站点由访问者托管 +* 无法关闭: 因为节点无处不在 +* 快速并可离线运行: 即使没有互联网连接也可以使用 ## 功能 * 实时站点更新 * 支持 Namecoin 的 .bit 域名 - * 安装方便:只需解压并运行 + * 安装方便: 只需解压并运行 * 一键克隆存在的站点 - * 无需密码、基于 [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) - 的认证:您的账户被与比特币钱包相同的加密方法保护 - * 内建 SQL 服务器和 P2P 数据同步:让开发更简单并提升加载速度 - * 匿名性:完整的 Tor 网络支持,支持通过 .onion 隐藏服务相互连接而不是通过 IPv4 地址连接 + * 无需密码、基于 [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) 的认证:用与比特币钱包相同的加密方法用来保护你的账户 +你的账户被使用和比特币钱包相同的加密方法 + * 内建 SQL 服务器和 P2P 数据同步: 让开发更简单并提升加载速度 + * 匿名性: 完整的 Tor 网络支持,支持通过 .onion 隐藏服务相互连接而不是通过IPv4地址连接 * TLS 加密连接 * 自动打开 uPnP 端口 - * 多用户(openproxy)支持的插件 - * 适用于任何浏览器 / 操作系统 + * 插件和多用户 (开放式代理) 支持 + * 全平台兼容 ## 原理 -* 在运行 `zeronet.py` 后,您将可以通过 - `http://127.0.0.1:43110/{zeronet_address}`(例如: - `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`)访问 zeronet 中的站点 -* 在您浏览 zeronet 站点时,客户端会尝试通过 BitTorrent 网络来寻找可用的节点,从而下载需要的文件(html,css,js...) -* 您将会储存每一个浏览过的站点 -* 每个站点都包含一个名为 `content.json` 的文件,它储存了其他所有文件的 sha512 散列值以及一个通过站点私钥生成的签名 -* 如果站点的所有者(拥有站点地址的私钥)修改了站点,并且他 / 她签名了新的 `content.json` 然后推送至其他节点, - 那么这些节点将会在使用签名验证 `content.json` 的真实性后,下载修改后的文件并将新内容推送至另外的节点 +* 在你运行`zeronet.py`后你将可以通过`http://127.0.0.1:43110/{zeronet_address}` (比如. +`http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`)。访问 zeronet 中的站点。 -#### [关于 ZeroNet 加密,站点更新,多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [常见问题 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) +* 在你浏览 zeronet 站点时,客户端会尝试通过 BitTorrent 网络来寻找可用的节点,从而下载需要的文件 (html, css, js...) 
-#### [ZeroNet 开发者文档 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) +* 你将会储存每一个浏览过的站点 +* 每个站点都包含一个名为 `content.json` ,它储存了其他所有文件的 sha512 hash 值 + 和一个通过站点私钥建立的签名 +* 如果站点的所有者 (拥有私钥的那个人) 修改了站点, 并且他/她签名了新的 `content.json` 然后推送至其他节点, +那么所有节点将会在验证 `content.json` 的真实性 (使用签名)后, 下载修改后的文件并推送至其他节点。 + +#### [有关于 ZeroNet 加密, 站点更新, 多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1qBxkroB_iiX2zHEn0dt-N-qRZgyEzui46XS2hEa3AA4/pub?start=false&loop=false&delayms=3000) +#### [常见问题 »](https://zeronet.readthedocs.org/en/latest/faq/) + +#### [ZeroNet开发者文档 »](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/) ## 屏幕截图 ![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) +![ZeroTalk](https://zeronet.readthedocs.org/en/latest/img/zerotalk.png) -#### [ZeroNet 文档中的更多屏幕截图 »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) +#### [在 ZeroNet 文档里查看更多的屏幕截图 »](https://zeronet.readthedocs.org/en/latest/using_zeronet/sample_sites/) -## 如何加入 +## 如何加入 ? -### Windows +* 下载 ZeroBundle 文件包: + * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) + * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) + * [Linux 64bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) + * [Linux 32bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) +* 解压缩 +* 运行 `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) - - 下载 [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) - - 在任意位置解压缩 - - 运行 `ZeroNet.exe` - -### macOS +### Linux 命令行 - - 下载 [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) - - 在任意位置解压缩 - - 运行 `ZeroNet.app` - -### Linux (x86-64bit) +* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` +* `tar xvpfz ZeroBundle-linux64.tar.gz` +* `cd ZeroBundle` +* 执行 `./ZeroNet.sh` 来启动 - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` - - `unzip ZeroNet-linux.zip` - - `cd ZeroNet-linux` - - 使用以下命令启动 `./ZeroNet.sh` - - 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面 - - __提示:__ 若要允许在 Web 界面上的远程连接,使用以下命令启动 `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` +在你打开时他将会自动下载最新版本的 ZeroNet 。 -### 从源代码安装 +#### 在 Debian Linux 中手动安装 - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip` - - `unzip ZeroNet-src.zip` - - `cd ZeroNet` - - `sudo apt-get update` - - `sudo apt-get install python3-pip` - - `sudo python3 -m pip install -r requirements.txt` - - 使用以下命令启动 `python3 zeronet.py` - - 在浏览器打开 http://127.0.0.1:43110/ 即可访问 ZeroHello 页面 +* `sudo apt-get update` +* `sudo apt-get install msgpack-python python-gevent` +* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` +* `tar xvpfz master.tar.gz` +* `cd ZeroNet-master` +* 执行 `python zeronet.py` 来启动 +* 在你的浏览器中打开 http://127.0.0.1:43110/ - ### Android (arm, arm64, x86) - - minimum Android version supported 21 (Android 5.0 Lollipop) - - [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) - - APK download: https://github.com/canewsin/zeronet_mobile/releases -### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) - - minimum Android version supported 16 (JellyBean) - - [Download from Google 
Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite)
+### [Vagrant](https://www.vagrantup.com/)
+
+* `vagrant up`
+* 通过 `vagrant ssh` 连接到 VM
+* `cd /vagrant`
+* 运行 `python zeronet.py --ui_ip 0.0.0.0`
+* 在你的浏览器中打开 http://127.0.0.1:43110/
+
+### [Docker](https://www.docker.com/)
+* `docker run -d -v <local_data_folder>:/root/data -p 15441:15441 -p 43110:43110 nofish/zeronet`
+* 这个 Docker 镜像包含了 Tor ,但默认是禁用的,因为一些托管商不允许你在他们的服务器上运行 Tor。如果你希望启用它,
+设置 `ENABLE_TOR` 环境变量为 `true` (默认: `false`). E.g.:
+
+  `docker run -d -e "ENABLE_TOR=true" -v <local_data_folder>:/root/data -p 15441:15441 -p 43110:43110 nofish/zeronet`
+* 在你的浏览器中打开 http://127.0.0.1:43110/
+
+### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/)
+
+* `virtualenv env`
+* `source env/bin/activate`
+* `pip install msgpack-python gevent`
+* `python zeronet.py`
+* 在你的浏览器中打开 http://127.0.0.1:43110/
 
 ## 现有限制
 
-* 传输文件时没有压缩
+* 没有类似于 BitTorrent 的文件拆分来支持大文件
+* ~~没有比 BitTorrent 更好的匿名性~~ (已添加内置的完整 Tor 支持)
+* 传输文件时没有压缩~~和加密~~ (已添加 TLS 支持)
 * 不支持私有站点
 
-## 如何创建一个 ZeroNet 站点?
+## 如何创建一个 ZeroNet 站点?
 
- * 点击 [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d) 站点的 **⋮** > **「新建空站点」** 菜单项
- * 您将被**重定向**到一个全新的站点,该站点只能由您修改
- * 您可以在 **data/[您的站点地址]** 目录中找到并修改网站的内容
- * 修改后打开您的网站,将右上角的「0」按钮拖到左侧,然后点击底部的**签名**并**发布**按钮
-接下来的步骤:[ZeroNet 开发者文档](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
+如果 zeronet 在运行,把它关掉
+执行:
+```bash
+$ zeronet.py siteCreate
+...
+- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq
+- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+...
+- Site created!
+$ zeronet.py
+...
+```
+
+你已经完成了! 现在任何人都可以通过
+`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2`
+来访问你的站点
+
+下一步: [ZeroNet 开发者文档](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/)
+
+
+## 我要如何修改 ZeroNet 站点?
+
+* 修改位于 data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 的目录.
+  在你改好之后:
+
+```bash
+$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2...
+Private key (input hidden):
+```
+
+* 输入你在创建站点时获得的私钥
+
+```bash
+$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+...
+Site:13DNDk..bhC2 Publishing to 3/10 peers...
+Site:13DNDk..bhC2 Successfuly published to 3 peers
+- Serving files....
+```
+
+* 就是这样! 你现在已经成功的签名并推送了你的更改。
+
 
 ## 帮助这个项目
 
-- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred)
-- LiberaPay: https://liberapay.com/PramUkesh
-- Paypal: https://paypal.me/PramUkesh
-- Others: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
+- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX
+- Paypal: https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/
+- Gratipay: https://gratipay.com/zeronet/
 
-#### 感谢您!
+### 赞助商
 
-* 更多信息,帮助,变更记录和 zeronet 站点:https://www.reddit.com/r/zeronetx/
-* 前往 [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 或 [gitter](https://gitter.im/canewsin/ZeroNet) 和我们聊天
-* [这里](https://gitter.im/canewsin/ZeroNet)是一个 gitter 上的中文聊天室
-* Email: canews.in@gmail.com
+* 在 OSX/Safari 下 [BrowserStack.com](https://www.browserstack.com) 带来更好的兼容性
+
+#### 感谢!
+ +* 更多信息, 帮助, 变更记录和 zeronet 站点: https://www.reddit.com/r/zeronet/ +* 在: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) 和我们聊天,或者使用 [gitter](https://gitter.im/HelloZeroNet/ZeroNet) +* [这里](https://gitter.im/ZeroNet-zh/Lobby)是一个 gitter 上的中文聊天室 +* Email: hello@noloop.me diff --git a/README.md b/README.md index 70b79adc..cba22d7b 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ -# ZeroNet [![tests](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml/badge.svg)](https://github.com/ZeroNetX/ZeroNet/actions/workflows/tests.yml) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/) [![Docker Pulls](https://img.shields.io/docker/pulls/canewsin/zeronet)](https://hub.docker.com/r/canewsin/zeronet) - -Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.dev / [ZeroNet Site](http://127.0.0.1:43110/1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX/), Unlike Bitcoin, ZeroNet Doesn't need a blockchain to run, But uses cryptography used by BTC, to ensure data integrity and validation. +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.readthedocs.org/en/latest/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/) + +[简体中文](./README-zh-cn.md) + +Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.io ## Why? @@ -33,124 +35,158 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ * After starting `zeronet.py` you will be able to visit zeronet sites using `http://127.0.0.1:43110/{zeronet_address}` (eg. - `http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d`). + `http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`). * When you visit a new zeronet site, it tries to find peers using the BitTorrent network so it can download the site files (html, css, js...) from them. * Each visited site is also served by you. * Every site contains a `content.json` file which holds all other files in a sha512 hash and a signature generated using the site's private key. * If the site owner (who has the private key for the site address) modifies the - site and signs the new `content.json` and publishes it to the peers. + site, then he/she signs the new `content.json` and publishes it to the peers. Afterwards, the peers verify the `content.json` integrity (using the signature), they download the modified files and publish the new content to other peers. 
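The `content.json` mechanism described in the hunk above can be made concrete. Below is a rough sketch, not ZeroNet's actual implementation: the `files`/`sha512` manifest layout is inferred from the plugin code later in this patch, and signature verification is deliberately stubbed out.

```python
import hashlib
import json
import os

def files_match_content_json(site_dir):
    # Load the manifest. A real client would first verify the manifest's
    # signature ("signs" field) against the site address before trusting it.
    with open(os.path.join(site_dir, "content.json")) as f:
        content = json.load(f)
    for relative_path, info in content.get("files", {}).items():
        with open(os.path.join(site_dir, relative_path), "rb") as f:
            digest = hashlib.sha512(f.read()).hexdigest()
        # Assumption: the "sha512" field holds a hex digest of the file;
        # some clients store a truncated digest, so adjust the comparison
        # if you apply this to real site data.
        if digest != info["sha512"]:
            return False  # file was modified or is corrupt
    return True
```

Once a peer downloads a modified file, it repeats this check before serving the new content onward, which is why a single honest signature check at the manifest level protects the whole site.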
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [Frequently asked questions »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/faq/) +#### [Frequently asked questions »](https://zeronet.readthedocs.org/en/latest/faq/) -#### [ZeroNet Developer Documentation »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/) +#### [ZeroNet Developer Documentation »](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/) ## Screenshots ![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) +![ZeroTalk](https://zeronet.readthedocs.org/en/latest/img/zerotalk.png) -#### [More screenshots in ZeroNet docs »](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/using_zeronet/sample_sites/) +#### [More screenshots in ZeroNet docs »](https://zeronet.readthedocs.org/en/latest/using_zeronet/sample_sites/) ## How to join -### Windows +* Download ZeroBundle package: + * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) + * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) + * [Linux 64bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) + * [Linux 32bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) +* Unpack anywhere +* Run `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) - - Download [ZeroNet-win.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-win.zip) (26MB) - - Unpack anywhere - - Run `ZeroNet.exe` - -### macOS +### Linux terminal - - Download [ZeroNet-mac.zip](https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-mac.zip) (14MB) - - Unpack anywhere - - Run `ZeroNet.app` - -### Linux (x86-64bit) - - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-linux.zip` - - `unzip ZeroNet-linux.zip` - - `cd ZeroNet-linux` - - Start with: `./ZeroNet.sh` - - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/ - - __Tip:__ Start with `./ZeroNet.sh --ui_ip '*' --ui_restrict your.ip.address` to allow remote connections on the web interface. - - ### Android (arm, arm64, x86) - - minimum Android version supported 21 (Android 5.0 Lollipop) - - [Download from Google Play](https://play.google.com/store/apps/details?id=in.canews.zeronetmobile) - - APK download: https://github.com/canewsin/zeronet_mobile/releases +* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` +* `tar xvpfz ZeroBundle-linux64.tar.gz` +* `cd ZeroBundle` +* Start with `./ZeroNet.sh` -### Android (arm, arm64, x86) Thin Client for Preview Only (Size 1MB) - - minimum Android version supported 16 (JellyBean) - - [Download from Google Play](https://play.google.com/store/apps/details?id=dev.zeronetx.app.lite) +It downloads the latest version of ZeroNet then starts it automatically. 
+#### Manual install for Debian Linux
-#### Docker
-There is an official image, built from source at: https://hub.docker.com/r/canewsin/zeronet/
+
+* `sudo apt-get update`
+* `sudo apt-get install msgpack-python python-gevent`
+* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz`
+* `tar xvpfz master.tar.gz`
+* `cd ZeroNet-master`
+* Start with `python zeronet.py`
+* Open http://127.0.0.1:43110/ in your browser
-### Online Proxies
-Proxies are like seed boxes for sites(i.e ZNX runs on a cloud vps), you can try zeronet experience from proxies. Add your proxy below if you have one.
+### [FreeBSD](https://www.freebsd.org/)
-#### Official ZNX Proxy :
+* `pkg install zeronet` or `cd /usr/ports/security/zeronet/ && make install clean`
+* `sysrc zeronet_enable="YES"`
+* `service zeronet start`
+* Open http://127.0.0.1:43110/ in your browser
-https://proxy.zeronet.dev/
+### [Vagrant](https://www.vagrantup.com/)
-https://zeronet.dev/
+* `vagrant up`
+* Access VM with `vagrant ssh`
+* `cd /vagrant`
+* Run `python zeronet.py --ui_ip 0.0.0.0`
+* Open http://127.0.0.1:43110/ in your browser
-#### From Community
+### [Docker](https://www.docker.com/)
+* `docker run -d -v <local_data_folder>:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet`
+* This Docker image includes the Tor proxy, which is disabled by default. Beware that some
+hosting providers may not allow you to run Tor on their servers. If you want to enable it,
+set `ENABLE_TOR` environment variable to `true` (Default: `false`). E.g.:
-https://0net-preview.com/
+
+  `docker run -d -e "ENABLE_TOR=true" -v <local_data_folder>:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet`
+* Open http://127.0.0.1:43110/ in your browser
-https://portal.ngnoid.tv/
+### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/)
-https://zeronet.ipfsscan.io/
-
-
-### Install from source
-
- - `wget https://github.com/ZeroNetX/ZeroNet/releases/latest/download/ZeroNet-src.zip`
- - `unzip ZeroNet-src.zip`
- - `cd ZeroNet`
- - `sudo apt-get update`
- - `sudo apt-get install python3-pip`
- - `sudo python3 -m pip install -r requirements.txt`
- - Start with: `python3 zeronet.py`
- - Open the ZeroHello landing page in your browser by navigating to: http://127.0.0.1:43110/
+* `virtualenv env`
+* `source env/bin/activate`
+* `pip install msgpack-python gevent`
+* `python zeronet.py`
+* Open http://127.0.0.1:43110/ in your browser
 
 ## Current limitations
 
-* File transactions are not compressed
+* No torrent-like file splitting for big file support
+* ~~No more anonymous than Bittorrent~~ (built-in full Tor support added)
+* File transactions are not compressed ~~or encrypted yet~~ (TLS encryption added)
 * No private sites
 
 ## How can I create a ZeroNet site?
 
- * Click on **⋮** > **"Create new, empty site"** menu item on the site [ZeroHello](http://127.0.0.1:43110/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d).
- * You will be **redirected** to a completely new site that is only modifiable by you!
- * You can find and modify your site's content in **data/[yoursiteaddress]** directory
- * After the modifications open your site, drag the topright "0" button to left, then press **sign** and **publish** buttons on the bottom
+Shut down zeronet if you are running it already
+
+```bash
+$ zeronet.py siteCreate
+...
+- Site private key: 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq
+- Site address: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+...
+- Site created!
+$ zeronet.py
+...
+```
+
+Congratulations, you're finished! Now anyone can access your site using
+`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2`
+
+Next steps: [ZeroNet Developer Documentation](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/)
+
+
+## How can I modify a ZeroNet site?
+
+* Modify files located in data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2 directory.
+  After you're finished:
+
+```bash
+$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+- Signing site: 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2...
+Private key (input hidden):
+```
+
+* Enter the private key you got when you created the site, then:
+
+```bash
+$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+...
+Site:13DNDk..bhC2 Publishing to 3/10 peers...
+Site:13DNDk..bhC2 Successfuly published to 3 peers
+- Serving files....
+```
+
+* That's it! You've successfully signed and published your modifications.
-
-Next steps: [ZeroNet Developer Documentation](https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/site_development/getting_started/)
 
 ## Help keep this project alive
-- Bitcoin: 1ZeroNetyV5mKY9JF1gsm82TuBXHpfdLX (Preferred)
-- LiberaPay: https://liberapay.com/PramUkesh
-- Paypal: https://paypal.me/PramUkesh
-- Others: [Donate](!https://docs.zeronet.dev/1DeveLopDZL1cHfKi8UXHh2UBEhzH6HhMp/help_zeronet/donate/#help-to-keep-zeronet-development-alive)
+
+- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX
+- Paypal: https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/
+- Gratipay: https://gratipay.com/zeronet/
+
+### Sponsors
+
+* Better OSX/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com)
 
 #### Thank you!
 
-* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronetx/
-* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/canewsin/ZeroNet)
-* Email: canews.in@gmail.com
+* More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronet/
+* Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet)
+* Email: hello@zeronet.io (PGP: CB9613AE)
diff --git a/Vagrantfile b/Vagrantfile
index 24fe0c45..6c4da894 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -40,6 +40,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
     config.vm.provision "shell",
       inline: "sudo apt-get install msgpack-python python-gevent python-pip python-dev -y"
     config.vm.provision "shell",
-      inline: "sudo pip install msgpack --upgrade"
+      inline: "sudo pip install msgpack-python --upgrade"
 end
diff --git a/plugins b/plugins
deleted file mode 160000
index 689d9309..00000000
--- a/plugins
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 689d9309f73371f4681191b125ec3f2e14075eeb
diff --git a/plugins/AnnounceZero/AnnounceZeroPlugin.py b/plugins/AnnounceZero/AnnounceZeroPlugin.py
new file mode 100644
index 00000000..420ac182
--- /dev/null
+++ b/plugins/AnnounceZero/AnnounceZeroPlugin.py
@@ -0,0 +1,120 @@
+import hashlib
+import time
+
+from Plugin import PluginManager
+from Peer import Peer
+from util import helper
+from Crypt import CryptRsa
+
+allow_reload = False  # No source reload supported in this plugin
+time_full_announced = {}  # Tracker address: Last time all sites were announced to the tracker
+connection_pool = {}  # Tracker address: Peer object
+
+
+# Process the result we got back from the tracker
+def processPeerRes(site, peers):
+    added = 0
+    # Ip4
+    found_ip4 = 0
+    for packed_address in peers["ip4"]:
+        found_ip4 += 1
+        peer_ip, peer_port = helper.unpackAddress(packed_address)
+        if site.addPeer(peer_ip, peer_port):
+            added += 1
+    # Onion
+    found_onion = 0
+    for packed_address in peers["onion"]:
+        found_onion += 1
+        peer_onion, peer_port = helper.unpackOnionAddress(packed_address)
+        if site.addPeer(peer_onion, peer_port):
+            added += 1
+
+    if added:
+        site.worker_manager.onPeers()
+        site.updateWebsocket(peers_added=added)
+        site.log.debug("Found %s ip4, %s onion peers, new: %s" % (found_ip4, found_onion, added))
+
+
+@PluginManager.registerTo("Site")
+class SitePlugin(object):
+    def announceTracker(self, tracker_protocol, tracker_address, fileserver_port=0, add_types=[], my_peer_id="", mode="start"):
+        if tracker_protocol != "zero":
+            return super(SitePlugin, self).announceTracker(
+                tracker_protocol, tracker_address, fileserver_port, add_types, my_peer_id, mode
+            )
+
+        s = time.time()
+
+        need_types = ["ip4"]
+        if self.connection_server and self.connection_server.tor_manager and self.connection_server.tor_manager.enabled:
+            need_types.append("onion")
+
+        if mode == "start" or mode == "more":  # Single: announce only this site
+            sites = [self]
+            full_announce = False
+        else:  # Multi: announce all currently served sites
+            full_announce = True
+            if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 5:  # Don't reannounce all sites within 5 minutes
+                return True
+            time_full_announced[tracker_address] = time.time()
+            from Site import SiteManager
+            sites = [site for site in SiteManager.site_manager.sites.values() if site.settings["serving"]]
+
+        # Create the request
+        request = {
+            "hashes": [], "onions": [], "port": fileserver_port, "need_types": need_types, "need_num": 20, "add": add_types
+        }
+        for site in sites:
+            if "onion" in add_types:
+                onion = self.connection_server.tor_manager.getOnion(site.address)
+                request["onions"].append(onion)
+            request["hashes"].append(hashlib.sha256(site.address).digest())
+
+        # The tracker can remove sites that we don't announce
+        if full_announce:
+            request["delete"] = True
+
+        # Send the request to the tracker
+        tracker = connection_pool.get(tracker_address)  # Reuse the tracker connection if possible
+        if not tracker:
+            tracker_ip, tracker_port = tracker_address.split(":")
+            tracker = Peer(tracker_ip, tracker_port, connection_server=self.connection_server)
+            connection_pool[tracker_address] = tracker
+        res = tracker.request("announce", request)
+
+        if not res or "peers" not in res:
+            self.log.debug("Announce to %s failed: %s" % (tracker_address, res))
+            if full_announce:
+                time_full_announced[tracker_address] = 0
+            return False
+
+        # Add peers from the response to the sites
+        site_index = 0
+        for site_res in res["peers"]:
+            site = sites[site_index]
+            processPeerRes(site, site_res)
+            site_index += 1
+
+        # Check if we need to sign to prove the onion addresses
+        if "onion_sign_this" in res:
+            self.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites)))
+            request["onion_signs"] = {}
+            request["onion_sign_this"] = res["onion_sign_this"]
+            request["need_num"] = 0
+            for site in sites:
+                onion = self.connection_server.tor_manager.getOnion(site.address)
+                publickey = self.connection_server.tor_manager.getPublickey(onion)
+                if publickey not in request["onion_signs"]:
+                    sign = CryptRsa.sign(res["onion_sign_this"], self.connection_server.tor_manager.getPrivatekey(onion))
+                    request["onion_signs"][publickey] = sign
+            res = tracker.request("announce", request)
+            if not res or "onion_sign_this" in res:
+                self.log.debug("Announce onion address to %s failed: %s" % (tracker_address, res))
+                if full_announce:
+                    time_full_announced[tracker_address] = 0
+                return False
+
+        if full_announce:
+            tracker.remove()  # Close the connection, we don't need it in the next 5 minutes
+
+        return time.time() - s
diff --git a/plugins/AnnounceZero/__init__.py b/plugins/AnnounceZero/__init__.py
new file mode 100644
index 00000000..4b9cbe10
--- /dev/null
+++ b/plugins/AnnounceZero/__init__.py
@@ -0,0 +1 @@
+import AnnounceZeroPlugin
\ No newline at end of file
diff --git a/plugins/CryptMessage/CryptMessage.py b/plugins/CryptMessage/CryptMessage.py
new file mode 100644
index 00000000..955dd9b1
--- /dev/null
+++ b/plugins/CryptMessage/CryptMessage.py
@@ -0,0 +1,53 @@
+from lib.pybitcointools import bitcoin as btctools
+import hashlib
+
+ecc_cache = {}
+
+
+def encrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'):
+    from lib import pyelliptic
+    curve, pubkey_x, pubkey_y, i = pyelliptic.ECC._decode_pubkey(pubkey)
+    if ephemcurve is None:
+        ephemcurve = curve
+    ephem = pyelliptic.ECC(curve=ephemcurve)
+    key = hashlib.sha512(ephem.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest()
+    key_e, key_m = key[:32], key[32:]
+    pubkey = ephem.get_pubkey()
+    iv = pyelliptic.OpenSSL.rand(pyelliptic.OpenSSL.get_cipher(ciphername).get_blocksize())
+    ctx = pyelliptic.Cipher(key_e, iv, 1, ciphername)
+    ciphertext = iv + pubkey + ctx.ciphering(data)
+    mac = pyelliptic.hmac_sha256(key_m, ciphertext)
+    return key_e, ciphertext + mac
+
+
+def split(encrypted):
+    iv = encrypted[0:16]
+    ciphertext = encrypted[16+70:-32]
+
+    return iv, ciphertext
+
+
+def getEcc(privatekey=None):
+    from lib import pyelliptic
+    global ecc_cache
+    if privatekey not in ecc_cache:
+        if privatekey:
+            publickey_bin = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin")
+            publickey_openssl = toOpensslPublickey(publickey_bin)
+            privatekey_openssl = toOpensslPrivatekey(privatekey)
+            ecc_cache[privatekey] = pyelliptic.ECC(curve='secp256k1', privkey=privatekey_openssl, pubkey=publickey_openssl)
+        else:
+            ecc_cache[None] = pyelliptic.ECC()
+    return ecc_cache[privatekey]
+
+
+def toOpensslPrivatekey(privatekey):
+    privatekey_bin = btctools.encode_privkey(privatekey, "bin")
+    return '\x02\xca\x00\x20' + privatekey_bin
+
+
+def toOpensslPublickey(publickey):
+    publickey_bin = btctools.encode_pubkey(publickey, "bin")
+    publickey_bin = publickey_bin[1:]
+    publickey_openssl = '\x02\xca\x00 ' + publickey_bin[:32] + '\x00 ' + publickey_bin[32:]
+    return publickey_openssl
diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py
new file mode 100644
index 00000000..0302c83a
--- /dev/null
+++ b/plugins/CryptMessage/CryptMessagePlugin.py
@@ -0,0 +1,149 @@
+import base64
+import os
+
+from Plugin import PluginManager
+from Crypt import CryptBitcoin
+from lib.pybitcointools import bitcoin as btctools
+
+import CryptMessage
+
+
+@PluginManager.registerTo("UiWebsocket")
+class UiWebsocketPlugin(object):
+    def encrypt(self, text, publickey):
+        encrypted = CryptMessage.encrypt(text, CryptMessage.toOpensslPublickey(publickey))
+        return encrypted
+
+    def decrypt(self, encrypted, privatekey):
+        back = CryptMessage.getEcc(privatekey).decrypt(encrypted)
+        return back.decode("utf8")
+
+    # - Actions -
+
+    # Returns the user's public key unique to the site
+    # Return: Public key
+    def actionUserPublickey(self, to, index=0):
+        publickey = self.user.getEncryptPublickey(self.site.address, index)
+        self.response(to, publickey)
+
+    # Encrypt a text using the given publickey or the user's site-unique publickey
+    # Return: Encrypted text using base64 encoding
+    def actionEciesEncrypt(self, to, text, publickey=0, return_aes_key=False):
+        if type(publickey) is int:  # Encrypt using the user's publickey
+            publickey = self.user.getEncryptPublickey(self.site.address, publickey)
+        aes_key, encrypted = self.encrypt(text.encode("utf8"), publickey.decode("base64"))
+        if return_aes_key:
+            self.response(to, [base64.b64encode(encrypted), base64.b64encode(aes_key)])
+        else:
+            self.response(to, base64.b64encode(encrypted))
+
+    # Decrypt a text using the given privatekey or the user's site-unique private key
+    # Return: Decrypted text or list of decrypted texts
+    def actionEciesDecrypt(self, to, param, privatekey=0):
+        if type(privatekey) is int:  # Decrypt using the user's privatekey
+            privatekey = self.user.getEncryptPrivatekey(self.site.address, privatekey)
+
+        if type(param) == list:
+            encrypted_texts = param
+        else:
+            encrypted_texts = [param]
+
+        texts = []  # Decoded texts
+        for encrypted_text in encrypted_texts:
+            try:
+                text = self.decrypt(encrypted_text.decode("base64"), privatekey)
+                texts.append(text)
+            except Exception, err:
+                texts.append(None)
+
+        if type(param) == list:
+            self.response(to, texts)
+        else:
+            self.response(to, texts[0])
+
+    # Encrypt a text using AES
+    # Return: Iv, AES key, Encrypted text
+    def actionAesEncrypt(self, to, text, key=None, iv=None):
+        from lib import pyelliptic
+
+        if key:
+            key = key.decode("base64")
+        else:
+            key = os.urandom(32)
+
+        if iv:  # Generate a new IV if not defined
+            iv = iv.decode("base64")
+        else:
+            iv = pyelliptic.Cipher.gen_IV('aes-256-cbc')
+
+        if text:
+            encrypted = pyelliptic.Cipher(key, iv, 1, ciphername='aes-256-cbc').ciphering(text.encode("utf8"))
+        else:
+            encrypted = ""
+
+        self.response(to, [base64.b64encode(key), base64.b64encode(iv), base64.b64encode(encrypted)])
+
+    # Decrypt a text using AES
+    # Return: Decrypted text
+    def actionAesDecrypt(self, to, *args):
+        from lib import pyelliptic
+
+        if len(args) == 3:  # Single decrypt
+            encrypted_texts = [(args[0], args[1])]
+            keys = [args[2]]
+        else:  # Batch decrypt
+            encrypted_texts, keys = args
+
+        texts = []  # Decoded texts
+        for iv, encrypted_text in encrypted_texts:
+            encrypted_text = encrypted_text.decode("base64")
+            iv = iv.decode("base64")
+            text = None
+            for key in keys:
+                ctx = pyelliptic.Cipher(key.decode("base64"), iv, 0, ciphername='aes-256-cbc')
+                try:
+                    decrypted = ctx.ciphering(encrypted_text)
+                    if decrypted and decrypted.decode("utf8"):  # Valid text decoded
+                        text = decrypted
+                except Exception, err:
+                    pass
+            texts.append(text)
+
+        if len(args) == 3:
+            self.response(to, texts[0])
+        else:
+            self.response(to, texts)
+
+
+@PluginManager.registerTo("User")
+class UserPlugin(object):
+    def getEncryptPrivatekey(self, address, param_index=0):
+        assert param_index >= 0 and param_index <= 1000
+        site_data = self.getSiteData(address)
+
+        if site_data.get("cert"):  # Different privatekey for each cert provider
+            index = param_index + self.getAddressAuthIndex(site_data["cert"])
+        else:
+            index = param_index
+
+        if "encrypt_privatekey_%s" % index not in site_data:
+            address_index = self.getAddressAuthIndex(address)
+            crypt_index = address_index + 1000 + index
+            site_data["encrypt_privatekey_%s" % index] = CryptBitcoin.hdPrivatekey(self.master_seed, crypt_index)
+            self.log.debug("New encrypt privatekey generated for %s:%s" % (address, index))
+        return site_data["encrypt_privatekey_%s" % index]
+
+    def getEncryptPublickey(self, address, param_index=0):
+        assert param_index >= 0 and param_index <= 1000
+        site_data = self.getSiteData(address)
+
+        if site_data.get("cert"):  # Different privatekey for each cert provider
+            index = param_index + self.getAddressAuthIndex(site_data["cert"])
+        else:
+            index = param_index
+
+        if "encrypt_publickey_%s" % index not in site_data:
+            privatekey = self.getEncryptPrivatekey(address, param_index)
+            publickey = btctools.encode_pubkey(btctools.privtopub(privatekey), "bin_compressed")
+            site_data["encrypt_publickey_%s" % index] = base64.b64encode(publickey)
+        return site_data["encrypt_publickey_%s" % index]
diff --git a/plugins/CryptMessage/Test/TestCrypt.py b/plugins/CryptMessage/Test/TestCrypt.py
new file mode 100644
index 00000000..8e16cba2
--- /dev/null
+++ b/plugins/CryptMessage/Test/TestCrypt.py
@@ -0,0 +1,106 @@
+import pytest
+from CryptMessage import CryptMessage
+
+@pytest.mark.usefixtures("resetSettings")
+class TestCrypt:
+    def testPublickey(self, ui_websocket):
+        pub = ui_websocket.testAction("UserPublickey", 0)
+        assert len(pub) == 44  # Compressed, b64 encoded publickey
+
+        # Different pubkey for the specified index
+        assert ui_websocket.testAction("UserPublickey", 1) != ui_websocket.testAction("UserPublickey", 0)
+
+        # Same publickey for same index
+        assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2)
+
+        # Different publickey for different cert
+        pub1 = ui_websocket.testAction("UserPublickey", 0)
+        site_data = ui_websocket.user.getSiteData(ui_websocket.site.address)
+        site_data["cert"] = "zeroid.bit"
+        pub2 = ui_websocket.testAction("UserPublickey", 0)
+        assert pub1 != pub2
+
+
+
+    def testEcies(self, ui_websocket):
+        ui_websocket.actionUserPublickey(0, 0)
+        pub = ui_websocket.ws.result
+
+        ui_websocket.actionEciesEncrypt(0, "hello", pub)
+        encrypted = ui_websocket.ws.result
+        assert len(encrypted) == 180
+
+        # Don't allow decrypt using other privatekey index
+        ui_websocket.actionEciesDecrypt(0, encrypted, 123)
+        decrypted = ui_websocket.ws.result
+        assert decrypted != "hello"
+
+        # Decrypt using correct privatekey
+        ui_websocket.actionEciesDecrypt(0, encrypted)
+        decrypted = ui_websocket.ws.result
+        assert decrypted == "hello"
+
+        # Decrypt batch
+        ui_websocket.actionEciesDecrypt(0, [encrypted, "baad", encrypted])
+        decrypted = ui_websocket.ws.result
+        assert decrypted == ["hello", None, "hello"]
+
+
+    def testEciesUtf8(self, ui_websocket):
+        # Utf8 test
+        utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9p'
+        ui_websocket.actionEciesEncrypt(0, utf8_text)
+        encrypted = ui_websocket.ws.result
+
+        ui_websocket.actionEciesDecrypt(0, encrypted)
+        assert ui_websocket.ws.result == utf8_text
+
+
+    def testEciesAes(self, ui_websocket):
+        ui_websocket.actionEciesEncrypt(0, "hello", return_aes_key=True)
+        ecies_encrypted, aes_key = ui_websocket.ws.result
+
+        # Decrypt using Ecies
+        ui_websocket.actionEciesDecrypt(0, ecies_encrypted)
+        assert ui_websocket.ws.result == "hello"
+
+        # Decrypt using AES
+        aes_iv, aes_encrypted = CryptMessage.split(ecies_encrypted.decode("base64"))
+
+        ui_websocket.actionAesDecrypt(0, aes_iv.encode("base64"), aes_encrypted.encode("base64"), aes_key)
+        assert ui_websocket.ws.result == "hello"
+
+
+    def testAes(self, ui_websocket):
+        ui_websocket.actionAesEncrypt(0, "hello")
+        key, iv, encrypted = ui_websocket.ws.result
+
+        assert len(key) == 44
+        assert len(iv) == 24
+        assert len(encrypted) == 24
+
+        # Single decrypt
+        ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
+        assert ui_websocket.ws.result == "hello"
+
+        # Batch decrypt
+        ui_websocket.actionAesEncrypt(0, "hello")
+        key2, iv2, encrypted2 = ui_websocket.ws.result
+
+        assert [key, iv, encrypted] != [key2, iv2, encrypted2]
+
+        # 2 correct keys
+        ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key])
+        assert ui_websocket.ws.result == ["hello", "hello", None, None]
+
+        # 3 keys
+        ui_websocket.actionAesDecrypt(0, [[iv, encrypted], [iv, encrypted], [iv, "baad"], [iv2, encrypted2]], [key, key2])
+        assert ui_websocket.ws.result == ["hello", "hello", None, "hello"]
+
+    def testAesUtf8(self, ui_websocket):
+        utf8_text = u'\xc1rv\xedzt\xfbr\xf5t\xfck\xf6rf\xfar\xf3g\xe9'
+        ui_websocket.actionAesEncrypt(0, utf8_text)
+        key, iv, encrypted = ui_websocket.ws.result
+
+        ui_websocket.actionAesDecrypt(0, iv, encrypted, key)
+        assert ui_websocket.ws.result == utf8_text
diff --git a/plugins/CryptMessage/Test/conftest.py b/plugins/CryptMessage/Test/conftest.py
new file mode 100644
index 00000000..8c1df5b2
--- /dev/null
+++ b/plugins/CryptMessage/Test/conftest.py
@@ -0,0 +1 @@
+from src.Test.conftest import *
\ No newline at end of file
diff --git a/plugins/CryptMessage/Test/pytest.ini b/plugins/CryptMessage/Test/pytest.ini
new file mode 100644
index 00000000..d09210d1
--- /dev/null
+++ b/plugins/CryptMessage/Test/pytest.ini
@@ -0,0 +1,5 @@
+[pytest]
+python_files = Test*.py
+addopts = -rsxX -v --durations=6
+markers =
+    webtest: mark a test as a webtest.
\ No newline at end of file
diff --git a/plugins/CryptMessage/__init__.py b/plugins/CryptMessage/__init__.py
new file mode 100644
index 00000000..3eb41820
--- /dev/null
+++ b/plugins/CryptMessage/__init__.py
@@ -0,0 +1 @@
+import CryptMessagePlugin
\ No newline at end of file
diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py
new file mode 100644
index 00000000..e80f9de5
--- /dev/null
+++ b/plugins/FilePack/FilePackPlugin.py
@@ -0,0 +1,89 @@
+import os
+import re
+
+from Plugin import PluginManager
+from Config import config
+from util import helper
+
+
+# Keep archives open for faster response times on large sites
+archive_cache = {}
+
+
+def closeArchive(archive_path):
+    if archive_path in archive_cache:
+        del archive_cache[archive_path]
+
+
+def openArchive(archive_path, path_within):
+    if archive_path not in archive_cache:
+        if archive_path.endswith("tar.gz"):
+            import tarfile
+            archive_cache[archive_path] = tarfile.open(archive_path, "r:gz")
+        elif archive_path.endswith("tar.bz2"):
+            import tarfile
+            archive_cache[archive_path] = tarfile.open(archive_path, "r:bz2")
+        else:
+            import zipfile
+            archive_cache[archive_path] = zipfile.ZipFile(archive_path)
+        helper.timer(5, lambda: closeArchive(archive_path))  # Close after 5 sec
+
+    archive = archive_cache[archive_path]
+
+    if archive_path.endswith(".zip"):
+        return archive.open(path_within)
+    else:
+        return archive.extractfile(path_within.encode("utf8"))
+
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+    def actionSiteMedia(self, path, header_length=True):
+        if ".zip/" in path or ".tar.gz/" in path:
+            path_parts = self.parsePath(path)
+            file_path = u"%s/%s/%s" % (config.data_dir, path_parts["address"], path_parts["inner_path"].decode("utf8"))
+            match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path)
+            archive_path, path_within = match.groups()
+            if not os.path.isfile(archive_path):
+                site = self.server.site_manager.get(path_parts["address"])
+                if not site:
+                    self.error404(path)
+                # Wait until the file downloads
+                result = site.needFile(site.storage.getInnerPath(archive_path), priority=10)
+                # Send virtual file path download finished event to remove the loading screen
+                site.updateWebsocket(file_done=site.storage.getInnerPath(file_path))
+                if not result:
+                    return self.error404(path)
+            try:
+                file = openArchive(archive_path, path_within)
+                content_type = self.getContentType(file_path)
+                self.sendHeader(200, content_type=content_type)
+                return self.streamFile(file)
+            except Exception, err:
+                self.log.debug("Error opening archive file: %s" % err)
+                return self.error404(path)
+
+        return super(UiRequestPlugin, self).actionSiteMedia(path, header_length=header_length)
+
+    def streamFile(self, file):
+        while 1:
+            try:
+                block = file.read(60 * 1024)
+                if block:
+                    yield block
+                else:
+                    raise StopIteration
+            except StopIteration:
+                file.close()
+                break
+
+
+@PluginManager.registerTo("SiteStorage")
+class SiteStoragePlugin(object):
+    def isFile(self, inner_path):
+        if ".zip/" in inner_path or ".tar.gz/" in inner_path or ".tar.bz2/" in inner_path:
+            match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", inner_path)
+            inner_archive_path, path_within = match.groups()
+            return super(SiteStoragePlugin, self).isFile(inner_archive_path)
+        else:
+            return super(SiteStoragePlugin, self).isFile(inner_path)
diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py
new file mode 100644
index 00000000..ab07a1ff
--- /dev/null
+++ b/plugins/FilePack/__init__.py
@@ -0,0 +1 @@
+import FilePackPlugin
\ No newline at end of file
diff --git a/plugins/MergerSite/MergerSitePlugin.py b/plugins/MergerSite/MergerSitePlugin.py
new file mode 100644
index 00000000..af1a8ff5
--- /dev/null
+++ b/plugins/MergerSite/MergerSitePlugin.py
@@ -0,0 +1,343 @@
+import re
+import time
+
+from Plugin import PluginManager
+from Translate import Translate
+from util import RateLimit
+from util import helper
+from Debug import Debug
+try:
+    import OptionalManager.UiWebsocketPlugin  # To make optionalFileInfo merger sites compatible
+except Exception:
+    pass
+
+if "merger_db" not in locals().keys():  # To keep merger_sites between module reloads
+    merger_db = {}  # Sites that are allowed to include other sites {address: [type1, type2...]}
+    merged_db = {}  # Sites that are allowed to be merged into other sites {address: type, ...}
+    merged_to_merger = {}  # {address: [site1, site2, ...]} cache
+    site_manager = None  # Site manager for merger sites
+
+if "_" not in locals():
+    _ = Translate("plugins/MergerSite/languages/")
+
+
+# Check if the merger site has permission to load the merged path
+def checkMergerPath(address, inner_path):
+    merged_match = re.match("^merged-(.*?)/([A-Za-z0-9]{26,35})/", inner_path)
+    if merged_match:
+        merger_type = merged_match.group(1)
+        # Check if the merger site is allowed to include other sites of this type
+        if merger_type in merger_db.get(address, []):
+            # Check if the included site allows being included
+            merged_address = merged_match.group(2)
+            if merged_db.get(merged_address) == merger_type:
+                inner_path = re.sub("^merged-(.*?)/([A-Za-z0-9]{26,35})/", "", inner_path)
+                return merged_address, inner_path
+            else:
+                raise Exception(
+                    "Merger site (%s) does not have permission for merged site: %s (%s)" %
+                    (merger_type, merged_address, merged_db.get(merged_address))
+                )
+        else:
+            raise Exception("No merger (%s) permission to load: %s (%s not in %s)" % (
+                address, inner_path, merger_type, merger_db.get(address, []))
+            )
+    else:
+        raise Exception("Invalid merger path: %s" % inner_path)
+
+
+@PluginManager.registerTo("UiWebsocket")
+class UiWebsocketPlugin(object):
+    # Download new site
+    def actionMergerSiteAdd(self, to, addresses):
+        if type(addresses) != list:
+            # Single site add
+            addresses = [addresses]
+        # Check if the site has merger permission
+        merger_types = merger_db.get(self.site.address)
+        if not merger_types:
+            return self.response(to, {"error": "Not a merger site"})
+
+        if RateLimit.isAllowed(self.site.address + "-MergerSiteAdd", 10) and len(addresses) == 1:
+            # Skip confirmation if there is only one site address and it was not called in the last 10 sec
+            self.cbMergerSiteAdd(to, addresses)
+        else:
+            self.cmd(
+                "confirm",
+                [_["Add %s new site?"] % len(addresses), "Add"],
+                lambda (res): self.cbMergerSiteAdd(to, addresses)
+            )
+        self.response(to, "ok")
+
+    # Callback of the new site confirmation dialog
+    def cbMergerSiteAdd(self, to, addresses):
+        added = 0
+        for address in addresses:
+            added += 1
+            site_manager.need(address)
+        if added:
+            self.cmd("notification", ["done", _["Added %s new site"] % added, 5000])
+            RateLimit.called(self.site.address + "-MergerSiteAdd")
+            site_manager.updateMergerSites()
+
+    # Delete a merged site
+    def actionMergerSiteDelete(self, to, address):
+        site = self.server.sites.get(address)
+        if not site:
+            return self.response(to, {"error": "No site found: %s" % address})
+
+        merger_types = merger_db.get(self.site.address)
+        if not merger_types:
+            return self.response(to, {"error": "Not a merger site"})
+        if merged_db.get(address) not in merger_types:
+            return self.response(to, {"error": "Merged type (%s) not in %s" % (merged_db.get(address), merger_types)})
+
+        self.cmd("notification", ["done", _["Site deleted: %s"] % address, 5000])
+        self.response(to, "ok")
+
+    # List merged sites
+    def actionMergerSiteList(self, to, query_site_info=False):
+        merger_types = merger_db.get(self.site.address)
+        ret = {}
+        if not merger_types:
+            return self.response(to, {"error": "Not a merger site"})
+        for address, merged_type in merged_db.iteritems():
+            if merged_type not in merger_types:
+                continue  # Site not for us
+            if query_site_info:
+                site = self.server.sites.get(address)
+                ret[address] = self.formatSiteInfo(site, create_user=False)
+            else:
+                ret[address] = merged_type
+        self.response(to, ret)
+
+    def hasSitePermission(self, address):
+        if super(UiWebsocketPlugin, self).hasSitePermission(address):
+            return True
+        else:
+            if self.site.address in [merger_site.address for merger_site in merged_to_merger.get(address, [])]:
+                return True
+            else:
+                return False
+
+    # Add merger site support to file commands
+    def mergerFuncWrapper(self, func_name, to, inner_path, *args, **kwargs):
+        func = getattr(super(UiWebsocketPlugin, self), func_name)
+        if inner_path.startswith("merged-"):
+            merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
+
+            # Set the same cert for the merged site
+            merger_cert = self.user.getSiteData(self.site.address).get("cert")
+            if merger_cert and self.user.getSiteData(merged_address).get("cert") != merger_cert:
+                self.user.setCert(merged_address, merger_cert)
+
+            site_before = self.site  # Save to be able to change it back after we ran the command
+            self.site = self.server.sites.get(merged_address)  # Change the site to the merged one
+            try:
+                back = func(to, merged_inner_path, *args, **kwargs)
+            finally:
+                self.site = site_before  # Change back to the original site
+            return back
+        else:
+            return func(to, inner_path, *args, **kwargs)
+
+    def actionFileGet(self, to, inner_path, *args, **kwargs):
+        return self.mergerFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs)
+
+    def actionFileWrite(self, to, inner_path, *args, **kwargs):
+        return self.mergerFuncWrapper("actionFileWrite", to, inner_path, *args, **kwargs)
+
+    def actionFileDelete(self, to, inner_path, *args, **kwargs):
+        return self.mergerFuncWrapper("actionFileDelete", to, inner_path, *args, **kwargs)
+
+    def actionFileRules(self, to, inner_path, *args, **kwargs):
+        return self.mergerFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs)
+
+    def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs):
+        return self.mergerFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs)
+
+    def actionOptionalFileDelete(self, to, inner_path, *args, **kwargs):
+        return self.mergerFuncWrapper("actionOptionalFileDelete", to, inner_path, *args, **kwargs)
+
+    # Add merger site support to file commands with a privatekey parameter
+    def mergerFuncWrapperWithPrivatekey(self, func_name, to, privatekey, inner_path, *args, **kwargs):
+        func = getattr(super(UiWebsocketPlugin, self), func_name)
+        if inner_path.startswith("merged-"):
+            merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path)
+            merged_site = self.server.sites.get(merged_address)
+
+            # Set the same cert for the merged site
+            merger_cert = self.user.getSiteData(self.site.address).get("cert")
+            if merger_cert:
+                self.user.setCert(merged_address, merger_cert)
+
+            site_before = self.site  # Save to be able to change it back after we ran the command
+            self.site = merged_site  # Change the site to the merged one
+            try:
+                back = func(to, privatekey, merged_inner_path, *args, **kwargs)
+            finally:
+                self.site = site_before  # Change back to the original site
+            return back
+        else:
+            return func(to, privatekey, inner_path, *args, **kwargs)
+
+    def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
+        return self.mergerFuncWrapperWithPrivatekey("actionSiteSign", to, privatekey, inner_path, *args, **kwargs)
+
+    def actionSitePublish(self, to, privatekey=None, inner_path="content.json", *args, **kwargs):
+        return self.mergerFuncWrapperWithPrivatekey("actionSitePublish", to, privatekey, inner_path, *args, **kwargs)
+
+    def actionPermissionAdd(self, to, permission):
+        super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission)
+        if permission.startswith("Merger"):
+            self.site.storage.rebuildDb()
+
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+    # Allow loading merged site files using /merged-ZeroMe/address/file.jpg
+    def parsePath(self, path):
+        path_parts = super(UiRequestPlugin, self).parsePath(path)
+        if "merged-" not in path:  # Optimization
+            return path_parts
+        path_parts["address"], path_parts["inner_path"] = checkMergerPath(path_parts["address"], path_parts["inner_path"])
+        return path_parts
+
+
+@PluginManager.registerTo("SiteStorage")
+class SiteStoragePlugin(object):
+    # Also rebuild from merged sites
+    def getDbFiles(self):
+        merger_types = merger_db.get(self.site.address)
+
+        # First return the site's own db files
+        for item in super(SiteStoragePlugin, self).getDbFiles():
+            yield item
+
+        # Not a merger site, that's all
+        if not merger_types:
+            raise StopIteration
+
+        merged_sites = [
+            site_manager.sites[address]
+            for address, merged_type in merged_db.iteritems()
+            if merged_type in merger_types
+        ]
+        for merged_site in merged_sites:
self.log.debug("Loading merged site: %s" % merged_site) + merged_type = merged_db[merged_site.address] + for content_inner_path, content in merged_site.content_manager.contents.iteritems(): + # content.json file itself + if merged_site.storage.isFile(content_inner_path): # Missing content.json file + merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path) + yield merged_inner_path, merged_site.storage.open(content_inner_path) + else: + merged_site.log.error("[MISSING] %s" % content_inner_path) + # Data files in content.json + content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site + for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + if not file_relative_path.endswith(".json"): + continue # We only interesed in json files + file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir + file_inner_path = file_inner_path.strip("/") # Strip leading / + if merged_site.storage.isFile(file_inner_path): + merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path) + yield merged_inner_path, merged_site.storage.open(file_inner_path) + else: + merged_site.log.error("[MISSING] %s" % file_inner_path) + + # Also notice merger sites on a merged site file change + def onUpdated(self, inner_path, file=None): + super(SiteStoragePlugin, self).onUpdated(inner_path, file) + + merged_type = merged_db.get(self.site.address) + + for merger_site in merged_to_merger.get(self.site.address, []): + if merger_site.address == self.site.address: # Avoid infinite loop + continue + virtual_path = "merged-%s/%s/%s" % (merged_type, self.site.address, inner_path) + if inner_path.endswith(".json"): + if file is not None: + merger_site.storage.onUpdated(virtual_path, file=file) + else: + merger_site.storage.onUpdated(virtual_path, file=self.open(inner_path)) + else: + merger_site.storage.onUpdated(virtual_path) + + +@PluginManager.registerTo("Site") +class SitePlugin(object): + def fileDone(self, inner_path): + super(SitePlugin, self).fileDone(inner_path) + + for merger_site in merged_to_merger.get(self.address, []): + if merger_site.address == self.address: + continue + for ws in merger_site.websockets: + ws.event("siteChanged", self, {"event": ["file_done", inner_path]}) + + def fileFailed(self, inner_path): + super(SitePlugin, self).fileFailed(inner_path) + + for merger_site in merged_to_merger.get(self.address, []): + if merger_site.address == self.address: + continue + for ws in merger_site.websockets: + ws.event("siteChanged", self, {"event": ["file_failed", inner_path]}) + + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + # Update merger site for site types + def updateMergerSites(self): + global merger_db, merged_db, merged_to_merger, site_manager + s = time.time() + merger_db = {} + merged_db = {} + merged_to_merger = {} + site_manager = self + if not self.sites: + return + for site in self.sites.itervalues(): + # Update merged sites + try: + merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type") + except Exception, err: + self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err))) + continue + if merged_type: + merged_db[site.address] = merged_type + + # Update merger sites + for permission in site.settings["permissions"]: + if not permission.startswith("Merger:"): + continue + if merged_type: + self.log.error( + "Removing permission 
%s from %s: Merger and merged at the same time." % + (permission, site.address) + ) + site.settings["permissions"].remove(permission) + continue + merger_type = permission.replace("Merger:", "") + if site.address not in merger_db: + merger_db[site.address] = [] + merger_db[site.address].append(merger_type) + site_manager.sites[site.address] = site + + # Update merged to merger + if merged_type: + for merger_site in self.sites.itervalues(): + if "Merger:" + merged_type in merger_site.settings["permissions"]: + if site.address not in merged_to_merger: + merged_to_merger[site.address] = [] + merged_to_merger[site.address].append(merger_site) + self.log.debug("Updated merger sites in %.3fs" % (time.time() - s)) + + def load(self, *args, **kwags): + super(SiteManagerPlugin, self).load(*args, **kwags) + self.updateMergerSites() + + def save(self, *args, **kwags): + super(SiteManagerPlugin, self).save(*args, **kwags) + self.updateMergerSites() diff --git a/plugins/MergerSite/__init__.py b/plugins/MergerSite/__init__.py new file mode 100644 index 00000000..f1f3412c --- /dev/null +++ b/plugins/MergerSite/__init__.py @@ -0,0 +1 @@ +import MergerSitePlugin \ No newline at end of file diff --git a/plugins/MergerSite/languages/fr.json b/plugins/MergerSite/languages/fr.json new file mode 100644 index 00000000..9d59fde9 --- /dev/null +++ b/plugins/MergerSite/languages/fr.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Ajouter le site %s ?", + "Added %s new site": "Site %s ajouté", + "Site deleted: %s": "Site %s supprimé" +} diff --git a/plugins/MergerSite/languages/hu.json b/plugins/MergerSite/languages/hu.json new file mode 100644 index 00000000..8e377aaa --- /dev/null +++ b/plugins/MergerSite/languages/hu.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Új oldal hozzáadása: %s?", + "Added %s new site": "Új oldal hozzáadva: %s", + "Site deleted: %s": "Oldal törölve: %s" +} diff --git a/plugins/MergerSite/languages/it.json b/plugins/MergerSite/languages/it.json new file mode 100644 index 00000000..d56c9817 --- /dev/null +++ b/plugins/MergerSite/languages/it.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Aggiungere %s nuovo sito ?", + "Added %s new site": "Sito %s aggiunto", + "Site deleted: %s": "Sito %s eliminato" +} diff --git a/plugins/MergerSite/languages/pt-br.json b/plugins/MergerSite/languages/pt-br.json new file mode 100644 index 00000000..cdc298cb --- /dev/null +++ b/plugins/MergerSite/languages/pt-br.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Adicionar %s novo site?", + "Added %s new site": "Site %s adicionado", + "Site deleted: %s": "Site removido: %s" +} diff --git a/plugins/MergerSite/languages/tr.json b/plugins/MergerSite/languages/tr.json new file mode 100644 index 00000000..5afb3942 --- /dev/null +++ b/plugins/MergerSite/languages/tr.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "%s sitesi eklensin mi?", + "Added %s new site": "%s sitesi eklendi", + "Site deleted: %s": "%s sitesi silindi" +} diff --git a/plugins/MergerSite/languages/zh-tw.json b/plugins/MergerSite/languages/zh-tw.json new file mode 100644 index 00000000..a0684e63 --- /dev/null +++ b/plugins/MergerSite/languages/zh-tw.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "添加新網站: %s?", + "Added %s new site": "已添加到新網站:%s", + "Site deleted: %s": "網站已刪除:%s" +} diff --git a/plugins/MergerSite/languages/zh.json b/plugins/MergerSite/languages/zh.json new file mode 100644 index 00000000..127044e6 --- /dev/null +++ b/plugins/MergerSite/languages/zh.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "添加新站点: %s?", + "Added %s new site": 
"已添加到新站点:%s", + "Site deleted: %s": "站点已删除:%s" +} diff --git a/plugins/Mute/MutePlugin.py b/plugins/Mute/MutePlugin.py new file mode 100644 index 00000000..d16a97cd --- /dev/null +++ b/plugins/Mute/MutePlugin.py @@ -0,0 +1,100 @@ +import time +import json +import os +import re + +from Plugin import PluginManager +from Translate import Translate +from Config import config +from util import helper + + +if os.path.isfile("%s/mutes.json" % config.data_dir): + try: + mutes = json.load(open("%s/mutes.json" % config.data_dir))["mutes"] + except Exception, err: + mutes = {} +else: + open("%s/mutes.json" % config.data_dir, "w").write('{"mutes": {}}') + mutes = {} + +if "_" not in locals(): + _ = Translate("plugins/Mute/languages/") + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + # Search and remove or readd files of an user + def changeDb(self, auth_address, action): + self.log.debug("Mute action %s on user %s" % (action, auth_address)) + res = self.site.content_manager.contents.db.execute( + "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path", + {"inner_path": "%%/%s/%%" % auth_address} + ) + for row in res: + site = self.server.sites.get(row["address"]) + if not site: + continue + dir_inner_path = helper.getDirname(row["inner_path"]) + for file_name in site.storage.walk(dir_inner_path): + if action == "remove": + site.storage.onUpdated(dir_inner_path + file_name, False) + else: + site.storage.onUpdated(dir_inner_path + file_name) + site.onFileDone(dir_inner_path + file_name) + + def cbMuteAdd(self, to, auth_address, cert_user_id, reason): + mutes[auth_address] = {"cert_user_id": cert_user_id, "reason": reason, "source": self.site.address, "date_added": time.time()} + self.saveMutes() + self.changeDb(auth_address, "remove") + self.response(to, "ok") + + def actionMuteAdd(self, to, auth_address, cert_user_id, reason): + if "ADMIN" in self.getPermissions(to): + self.cbMuteAdd(to, auth_address, cert_user_id, reason) + else: + self.cmd( + "confirm", + [_["Hide all content from %s?"] % cert_user_id, _["Mute"]], + lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason) + ) + + def cbMuteRemove(self, to, auth_address): + del mutes[auth_address] + self.saveMutes() + self.changeDb(auth_address, "load") + self.response(to, "ok") + + def actionMuteRemove(self, to, auth_address): + if "ADMIN" in self.getPermissions(to): + self.cbMuteRemove(to, auth_address) + else: + self.cmd( + "confirm", + [_["Unmute %s?"] % mutes[auth_address]["cert_user_id"], _["Unmute"]], + lambda (res): self.cbMuteRemove(to, auth_address) + ) + + def actionMuteList(self, to): + if "ADMIN" in self.getPermissions(to): + self.response(to, mutes) + else: + return self.response(to, {"error": "Only ADMIN sites can list mutes"}) + + def saveMutes(self): + helper.atomicWrite("%s/mutes.json" % config.data_dir, json.dumps({"mutes": mutes}, indent=2, sort_keys=True)) + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + def updateDbFile(self, inner_path, file=None, cur=None): + if file is not False: # File deletion always allowed + # Find for bitcoin addresses in file path + matches = re.findall("/(1[A-Za-z0-9]{26,35})/", inner_path) + # Check if any of the adresses are in the mute list + for auth_address in matches: + if auth_address in mutes: + self.log.debug("Mute match: %s, ignoring %s" % (auth_address, inner_path)) + return False + + return super(SiteStoragePlugin, self).updateDbFile(inner_path, file=file, cur=cur) diff --git 
a/plugins/Mute/__init__.py b/plugins/Mute/__init__.py new file mode 100644 index 00000000..f9d1081c --- /dev/null +++ b/plugins/Mute/__init__.py @@ -0,0 +1 @@ +import MutePlugin \ No newline at end of file diff --git a/plugins/Mute/languages/hu.json b/plugins/Mute/languages/hu.json new file mode 100644 index 00000000..e3332db8 --- /dev/null +++ b/plugins/Mute/languages/hu.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s tartalmaniak elrejtése?", + "Mute": "Elnémítás", + "Unmute %s?": "%s tartalmaniak megjelenítése?", + "Unmute": "Némítás visszavonása" +} diff --git a/plugins/Mute/languages/it.json b/plugins/Mute/languages/it.json new file mode 100644 index 00000000..b0246918 --- /dev/null +++ b/plugins/Mute/languages/it.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s Vuoi nascondere i contenuti di questo utente ?", + "Mute": "Attiva Silenzia", + "Unmute %s?": "%s Vuoi mostrare i contenuti di questo utente ?", + "Unmute": "Disattiva Silenzia" +} diff --git a/plugins/Mute/languages/pt-br.json b/plugins/Mute/languages/pt-br.json new file mode 100644 index 00000000..fd858678 --- /dev/null +++ b/plugins/Mute/languages/pt-br.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "Esconder todo conteúdo de %s?", + "Mute": "Ativar mudo", + "Unmute %s?": "Mostrar o contéudo de %s?", + "Unmute": "Desativar mudo" +} diff --git a/plugins/Mute/languages/zh-tw.json b/plugins/Mute/languages/zh-tw.json new file mode 100644 index 00000000..b8590e38 --- /dev/null +++ b/plugins/Mute/languages/zh-tw.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "屏蔽 %s 的所有內容?", + "Mute": "屏蔽", + "Unmute %s?": "對 %s 解除屏蔽?", + "Unmute": "解除屏蔽" +} \ No newline at end of file diff --git a/plugins/Mute/languages/zh.json b/plugins/Mute/languages/zh.json new file mode 100644 index 00000000..5c48a7d8 --- /dev/null +++ b/plugins/Mute/languages/zh.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "屏蔽 %s 的所有内容?", + "Mute": "屏蔽", + "Unmute %s?": "对 %s 解除屏蔽?", + "Unmute": "解除屏蔽" +} \ No newline at end of file diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py new file mode 100644 index 00000000..feaf8542 --- /dev/null +++ b/plugins/Newsfeed/NewsfeedPlugin.py @@ -0,0 +1,133 @@ +import time +import re + +from Plugin import PluginManager +from Db import DbQuery + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def formatSiteInfo(self, site, create_user=True): + site_info = super(UiWebsocketPlugin, self).formatSiteInfo(site, create_user=True) + feed_following = self.user.sites[site.address].get("follow", None) + if feed_following == None: + site_info["feed_follow_num"] = None + else: + site_info["feed_follow_num"] = len(feed_following) + return site_info + + def actionFeedFollow(self, to, feeds): + self.user.setFeedFollow(self.site.address, feeds) + self.user.save() + self.response(to, "ok") + + def actionFeedListFollow(self, to): + feeds = self.user.sites[self.site.address].get("follow", {}) + self.response(to, feeds) + + def actionFeedQuery(self, to, limit=10, day_limit=3): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "FeedQuery not allowed") + + from Site import SiteManager + rows = [] + for address, site_data in self.user.sites.iteritems(): + feeds = site_data.get("follow") + if not feeds: + continue + if type(feeds) is not dict: + self.log.debug("Invalid feed for site %s" % address) + continue + for name, query_set in feeds.iteritems(): + site = SiteManager.site_manager.get(address) + try: + query, 
params = query_set
+                    query_parts = query.split("UNION")
+                    for i, query_part in enumerate(query_parts):
+                        db_query = DbQuery(query_part)
+                        if day_limit:
+                            where = " WHERE %s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit)
+                            if "WHERE" in query_part:
+                                query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where + " AND (\\1)", query_part)
+                            else:
+                                query_part += where
+                        query_parts[i] = query_part
+                    query = " UNION ".join(query_parts)
+
+                    if ":params" in query:
+                        query = query.replace(":params", ",".join(["?"] * len(params)))
+                        res = site.storage.query(query + " ORDER BY date_added DESC LIMIT %s" % limit, params)
+                    else:
+                        res = site.storage.query(query + " ORDER BY date_added DESC LIMIT %s" % limit)
+
+                except Exception, err:  # Log error
+                    self.log.error("%s feed query %s error: %s" % (address, name, err))
+                    continue
+
+                for row in res:
+                    row = dict(row)
+                    if "date_added" not in row:
+                        continue  # No timestamp, skip it
+                    if row["date_added"] > 1000000000000:  # Formatted as milliseconds
+                        row["date_added"] = row["date_added"] / 1000
+                    if row["date_added"] > time.time() + 120:
+                        self.log.debug("Newsfeed item from the future from site %s" % address)
+                        continue  # Feed item is in the future, skip it
+                    row["site"] = address
+                    row["feed_name"] = name
+                    rows.append(row)
+        return self.response(to, rows)
+
+    def actionFeedSearch(self, to, search):
+        if "ADMIN" not in self.site.settings["permissions"]:
+            return self.response(to, "FeedSearch not allowed")
+
+        from Site import SiteManager
+        rows = []
+        num_sites = 0
+        s = time.time()
+        for address, site in SiteManager.site_manager.list().iteritems():
+            if not site.storage.has_db:
+                continue
+
+            if site.storage.db:  # Database loaded
+                feeds = site.storage.db.schema.get("feeds")
+            else:
+                try:
+                    feeds = site.storage.loadJson("dbschema.json").get("feeds")
+                except:
+                    continue
+
+            if not feeds:
+                continue
+
+            num_sites += 1
+
+            for name, query in feeds.iteritems():
+                try:
+                    db_query = DbQuery(query)
+                    db_query.wheres.append("%s LIKE ? OR %s LIKE ?" 
% (db_query.fields["body"], db_query.fields["title"])) + db_query.parts["ORDER BY"] = "date_added DESC" + db_query.parts["LIMIT"] = "30" + + search_like = "%" + search.replace(" ", "%") + "%" + res = site.storage.query(str(db_query), [search_like, search_like]) + except Exception, err: + self.log.error("%s feed query %s error: %s" % (address, name, err)) + continue + for row in res: + row = dict(row) + if row["date_added"] > time.time() + 120: + continue # Feed item is in the future, skip it + row["site"] = address + row["feed_name"] = name + rows.append(row) + return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": time.time() - s}) + + +@PluginManager.registerTo("User") +class UserPlugin(object): + # Set queries that user follows + def setFeedFollow(self, address, feeds): + site_data = self.getSiteData(address) + site_data["follow"] = feeds + self.save() + return site_data diff --git a/plugins/Newsfeed/__init__.py b/plugins/Newsfeed/__init__.py new file mode 100644 index 00000000..20cc04a1 --- /dev/null +++ b/plugins/Newsfeed/__init__.py @@ -0,0 +1 @@ +import NewsfeedPlugin \ No newline at end of file diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py new file mode 100644 index 00000000..9df52f00 --- /dev/null +++ b/plugins/OptionalManager/ContentDbPlugin.py @@ -0,0 +1,401 @@ +import time +import collections +import itertools +import re + +import gevent + +from util import helper +from Plugin import PluginManager +from Config import config + +if "content_db" not in locals().keys(): # To keep between module reloads + content_db = None + + +@PluginManager.registerTo("ContentDb") +class ContentDbPlugin(object): + def __init__(self, *args, **kwargs): + global content_db + content_db = self + self.filled = {} # Site addresses that already filled from content.json + self.need_filling = False # file_optional table just created, fill data from content.json files + self.time_peer_numbers_updated = 0 + self.my_optional_files = {} # Last 50 site_address/inner_path called by fileWrite (auto-pinning these files) + self.optional_files = collections.defaultdict(dict) + self.optional_files_loading = False + helper.timer(60 * 5, self.checkOptionalLimit) + super(ContentDbPlugin, self).__init__(*args, **kwargs) + + def getSchema(self): + schema = super(ContentDbPlugin, self).getSchema() + + # Need file_optional table + schema["tables"]["file_optional"] = { + "cols": [ + ["file_id", "INTEGER PRIMARY KEY UNIQUE NOT NULL"], + ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"], + ["inner_path", "TEXT"], + ["hash_id", "INTEGER"], + ["size", "INTEGER"], + ["peer", "INTEGER DEFAULT 0"], + ["uploaded", "INTEGER DEFAULT 0"], + ["is_downloaded", "INTEGER DEFAULT 0"], + ["is_pinned", "INTEGER DEFAULT 0"], + ["time_added", "INTEGER DEFAULT 0"], + ["time_downloaded", "INTEGER DEFAULT 0"], + ["time_accessed", "INTEGER DEFAULT 0"] + ], + "indexes": [ + "CREATE UNIQUE INDEX file_optional_key ON file_optional (site_id, inner_path)", + "CREATE INDEX is_downloaded ON file_optional (is_downloaded)" + ], + "schema_changed": 11 + } + + return schema + + def initSite(self, site): + super(ContentDbPlugin, self).initSite(site) + if self.need_filling: + self.fillTableFileOptional(site) + if not self.optional_files_loading: + gevent.spawn_later(1, self.loadFilesOptional) + self.optional_files_loading = True + + def checkTables(self): + changed_tables = super(ContentDbPlugin, self).checkTables() + if "file_optional" in changed_tables: + 
self.need_filling = True + return changed_tables + + # Load optional files ending + def loadFilesOptional(self): + s = time.time() + num = 0 + total = 0 + total_downloaded = 0 + res = content_db.execute("SELECT site_id, inner_path, size, is_downloaded FROM file_optional") + site_sizes = collections.defaultdict(lambda: collections.defaultdict(int)) + for row in res: + self.optional_files[row["site_id"]][row["inner_path"][-8:]] = 1 + num += 1 + + # Update site size stats + site_sizes[row["site_id"]]["size_optional"] += row["size"] + if row["is_downloaded"]: + site_sizes[row["site_id"]]["optional_downloaded"] += row["size"] + + # Site site size stats to sites.json settings + site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()} + for site_id, stats in site_sizes.iteritems(): + site_address = site_ids_reverse.get(site_id) + if not site_address: + self.log.error("Not found site_id: %s" % site_id) + continue + site = self.sites[site_address] + site.settings["size_optional"] = stats["size_optional"] + site.settings["optional_downloaded"] = stats["optional_downloaded"] + total += stats["size_optional"] + total_downloaded += stats["optional_downloaded"] + + self.log.debug( + "Loaded %s optional files: %.2fMB, downloaded: %.2fMB in %.3fs" % + (num, float(total) / 1024 / 1024, float(total_downloaded) / 1024 / 1024, time.time() - s) + ) + + if self.need_filling and self.getOptionalLimitBytes() >= 0 and self.getOptionalLimitBytes() < total_downloaded: + limit_bytes = self.getOptionalLimitBytes() + limit_new = round((float(total_downloaded) / 1024 / 1024 / 1024) * 1.1, 2) # Current limit + 10% + self.log.debug( + "First startup after update and limit is smaller than downloaded files size (%.2fGB), increasing it from %.2fGB to %.2fGB" % + (float(total_downloaded) / 1024 / 1024 / 1024, float(limit_bytes) / 1024 / 1024 / 1024, limit_new) + ) + config.saveValue("optional_limit", limit_new) + config.optional_limit = str(limit_new) + + # Predicts if the file is optional + def isOptionalFile(self, site_id, inner_path): + return self.optional_files[site_id].get(inner_path[-8:]) + + # Fill file_optional table with optional files found in sites + def fillTableFileOptional(self, site): + s = time.time() + site_id = self.site_ids.get(site.address) + if not site_id: + return False + cur = self.getCursor() + cur.execute("BEGIN") + res = cur.execute("SELECT * FROM content WHERE size_files_optional > 0 AND site_id = %s" % site_id) + num = 0 + for row in res.fetchall(): + content = site.content_manager.contents[row["inner_path"]] + try: + num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur) + except Exception, err: + self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err)) + cur.execute("COMMIT") + cur.close() + + # Set my files to pinned + from User import UserManager + user = UserManager.user_manager.get() + if not user: + user = UserManager.user_manager.create() + auth_address = user.getAuthAddress(site.address) + self.execute( + "UPDATE file_optional SET is_pinned = 1 WHERE site_id = :site_id AND inner_path LIKE :inner_path", + {"site_id": site_id, "inner_path": "%%/%s/%%" % auth_address} + ) + + self.log.debug( + "Filled file_optional table for %s in %.3fs (loaded: %s, is_pinned: %s)" % + (site.address, time.time() - s, num, self.cur.cursor.rowcount) + ) + self.filled[site.address] = True + + def setContentFilesOptional(self, site, content_inner_path, content, cur=None): + if not cur: + cur = self + try: + cur.execute("BEGIN") + except 
Exception, err: + self.log.warning("Transaction begin error %s %s: %s" % (site, content_inner_path, Debug.formatException(err))) + + num = 0 + site_id = self.site_ids[site.address] + content_inner_dir = helper.getDirname(content_inner_path) + for relative_inner_path, file in content.get("files_optional", {}).iteritems(): + file_inner_path = content_inner_dir + relative_inner_path + hash_id = int(file["sha512"][0:4], 16) + if hash_id in site.content_manager.hashfield: + is_downloaded = 1 + else: + is_downloaded = 0 + if site.address + "/" + file_inner_path in self.my_optional_files: + is_pinned = 1 + else: + is_pinned = 0 + cur.insertOrUpdate("file_optional", { + "hash_id": hash_id, + "size": int(file["size"]), + "is_pinned": is_pinned + }, { + "site_id": site_id, + "inner_path": file_inner_path + }, oninsert={ + "time_added": int(time.time()), + "time_downloaded": int(time.time()) if is_downloaded else 0, + "is_downloaded": is_downloaded, + "peer": is_downloaded + }) + self.optional_files[site_id][file_inner_path[-8:]] = 1 + num += 1 + + if cur == self: + try: + cur.execute("END") + except Exception, err: + self.log.warning("Transaction end error %s %s: %s" % (site, content_inner_path, Debug.formatException(err))) + return num + + def setContent(self, site, inner_path, content, size=0): + super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size) + old_content = site.content_manager.contents.get(inner_path, {}) + if (not self.need_filling or self.filled.get(site.address)) and "files_optional" in content or "files_optional" in old_content: + self.setContentFilesOptional(site, inner_path, content) + # Check deleted files + if old_content: + old_files = old_content.get("files_optional", {}).keys() + new_files = content.get("files_optional", {}).keys() + content_inner_dir = helper.getDirname(inner_path) + deleted = [content_inner_dir + key for key in old_files if key not in new_files] + if deleted: + site_id = self.site_ids[site.address] + self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": deleted}) + + def deleteContent(self, site, inner_path): + content = site.content_manager.contents.get(inner_path) + if content and "files_optional" in content: + site_id = self.site_ids[site.address] + content_inner_dir = helper.getDirname(inner_path) + optional_inner_paths = [ + content_inner_dir + relative_inner_path + for relative_inner_path in content.get("files_optional", {}).keys() + ] + self.execute("DELETE FROM file_optional WHERE ?", {"site_id": site_id, "inner_path": optional_inner_paths}) + super(ContentDbPlugin, self).deleteContent(site, inner_path) + + def updatePeerNumbers(self): + s = time.time() + num_file = 0 + num_updated = 0 + num_site = 0 + for site in self.sites.values(): + if not site.content_manager.has_optional_files: + continue + has_updated_hashfield = next(( + peer + for peer in site.peers.itervalues() + if peer.has_hashfield and peer.hashfield.time_changed > self.time_peer_numbers_updated + ), None) + + if not has_updated_hashfield and site.content_manager.hashfield.time_changed < self.time_peer_numbers_updated: + continue + + hashfield_peers = itertools.chain.from_iterable( + peer.hashfield.storage + for peer in site.peers.itervalues() + if peer.has_hashfield + ) + peer_nums = collections.Counter( + itertools.chain( + hashfield_peers, + site.content_manager.hashfield + ) + ) + + site_id = self.site_ids[site.address] + if not site_id: + continue + + res = self.execute("SELECT file_id, hash_id, peer FROM file_optional WHERE ?", 
{"site_id": site_id}) + updates = {} + for row in res: + peer_num = peer_nums.get(row["hash_id"], 0) + if peer_num != row["peer"]: + updates[row["file_id"]] = peer_num + + self.execute("BEGIN") + for file_id, peer_num in updates.iteritems(): + self.execute("UPDATE file_optional SET peer = ? WHERE file_id = ?", (peer_num, file_id)) + self.execute("END") + + num_updated += len(updates) + num_file += len(peer_nums) + num_site += 1 + + self.time_peer_numbers_updated = time.time() + self.log.debug("%s/%s peer number for %s site updated in %.3fs" % (num_updated, num_file, num_site, time.time() - s)) + + def queryDeletableFiles(self): + # First return the files with atleast 10 seeder and not accessed in last weed + query = """ + SELECT * FROM file_optional + WHERE peer > 10 AND is_downloaded = 1 AND is_pinned = 0 + ORDER BY time_accessed < %s DESC, uploaded / size + """ % int(time.time() - 60 * 60 * 7) + limit_start = 0 + while 1: + num = 0 + res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) + for row in res: + yield row + num += 1 + if num < 50: + break + limit_start += 50 + + self.log.debug("queryDeletableFiles returning less-seeded files") + + # Then return files less seeder but still not accessed in last week + query = """ + SELECT * FROM file_optional + WHERE is_downloaded = 1 AND peer <= 10 AND is_pinned = 0 + ORDER BY peer DESC, time_accessed < %s DESC, uploaded / size + """ % int(time.time() - 60 * 60 * 7) + limit_start = 0 + while 1: + num = 0 + res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) + for row in res: + yield row + num += 1 + if num < 50: + break + limit_start += 50 + + self.log.debug("queryDeletableFiles returning everyting") + + # At the end return all files + query = """ + SELECT * FROM file_optional + WHERE is_downloaded = 1 AND peer <= 10 AND is_pinned = 0 + ORDER BY peer DESC, time_accessed, uploaded / size + """ + limit_start = 0 + while 1: + num = 0 + res = self.execute("%s LIMIT %s, 50" % (query, limit_start)) + for row in res: + yield row + num += 1 + if num < 50: + break + limit_start += 50 + + def getOptionalLimitBytes(self): + if config.optional_limit.endswith("%"): + limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit)) + limit_bytes = helper.getFreeSpace() * (limit_percent / 100) + else: + limit_bytes = float(re.sub("[^0-9.]", "", config.optional_limit)) * 1024 * 1024 * 1024 + return limit_bytes + + def getOptionalNeedDelete(self, size): + if config.optional_limit.endswith("%"): + limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit)) + need_delete = size - ((helper.getFreeSpace() + size) * (limit_percent / 100)) + else: + need_delete = size - self.getOptionalLimitBytes() + return need_delete + + def checkOptionalLimit(self, limit=None): + if not limit: + limit = self.getOptionalLimitBytes() + + if limit < 0: + self.log.debug("Invalid limit for optional files: %s" % limit) + return False + + size = self.execute("SELECT SUM(size) FROM file_optional WHERE is_downloaded = 1 AND is_pinned = 0").fetchone()[0] + if not size: + size = 0 + + need_delete = self.getOptionalNeedDelete(size) + + self.log.debug( + "Optional size: %.1fMB/%.1fMB, Need delete: %.1fMB" % + (float(size) / 1024 / 1024, float(limit) / 1024 / 1024, float(need_delete) / 1024 / 1024) + ) + if need_delete <= 0: + return False + + self.updatePeerNumbers() + + site_ids_reverse = {val: key for key, val in self.site_ids.iteritems()} + deleted_file_ids = [] + for row in self.queryDeletableFiles(): + site_address = site_ids_reverse.get(row["site_id"]) + site = 
self.sites.get(site_address) + if not site: + self.log.error("No site found for id: %s" % row["site_id"]) + continue + site.log.debug("Deleting %s %.3f MB left" % (row["inner_path"], float(need_delete) / 1024 / 1024)) + deleted_file_ids.append(row["file_id"]) + try: + site.content_manager.optionalRemove(row["inner_path"], row["hash_id"], row["size"]) + site.storage.delete(row["inner_path"]) + need_delete -= row["size"] + except Exception, err: + site.log.error("Error deleting %s: %s" % (row["inner_path"], err)) + + if need_delete <= 0: + break + + cur = self.getCursor() + cur.execute("BEGIN") + for file_id in deleted_file_ids: + cur.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"file_id": file_id}) + cur.execute("COMMIT") + cur.close() diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py b/plugins/OptionalManager/OptionalManagerPlugin.py new file mode 100644 index 00000000..687c87af --- /dev/null +++ b/plugins/OptionalManager/OptionalManagerPlugin.py @@ -0,0 +1,117 @@ +import time +import collections + +from util import helper +from Plugin import PluginManager +import ContentDbPlugin + + +def processAccessLog(): + if access_log: + content_db = ContentDbPlugin.content_db + now = int(time.time()) + num = 0 + for site_id in access_log: + content_db.execute( + "UPDATE file_optional SET time_accessed = %s WHERE ?" % now, + {"site_id": site_id, "inner_path": access_log[site_id].keys()} + ) + num += len(access_log[site_id]) + access_log.clear() + + +def processRequestLog(): + if request_log: + content_db = ContentDbPlugin.content_db + cur = content_db.getCursor() + num = 0 + cur.execute("BEGIN") + for site_id in request_log: + for inner_path, uploaded in request_log[site_id].iteritems(): + content_db.execute( + "UPDATE file_optional SET uploaded = uploaded + %s WHERE ?" 
% uploaded, + {"site_id": site_id, "inner_path": inner_path} + ) + num += 1 + cur.execute("END") + request_log.clear() + + +if "access_log" not in locals().keys(): # To keep between module reloads + access_log = collections.defaultdict(dict) # {site_id: {inner_path1: 1, inner_path2: 1...}} + request_log = collections.defaultdict(lambda: collections.defaultdict(int)) # {site_id: {inner_path1: 1, inner_path2: 1...}} + helper.timer(61, processAccessLog) + helper.timer(60, processRequestLog) + + +@PluginManager.registerTo("WorkerManager") +class WorkerManagerPlugin(object): + def doneTask(self, task): + if task["optional_hash_id"]: + content_db = self.site.content_manager.contents.db + content_db.executeDelayed( + "UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path", + {"now": int(time.time()), "site_id": content_db.site_ids[self.site.address], "inner_path": task["inner_path"]} + ) + + super(WorkerManagerPlugin, self).doneTask(task) + + if task["optional_hash_id"] and not self.tasks: + content_db.processDelayed() + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def parsePath(self, path): + global access_log + path_parts = super(UiRequestPlugin, self).parsePath(path) + if path_parts: + site_id = ContentDbPlugin.content_db.site_ids.get(path_parts["request_address"]) + if site_id: + if ContentDbPlugin.content_db.isOptionalFile(site_id, path_parts["inner_path"]): + access_log[site_id][path_parts["inner_path"]] = 1 + return path_parts + + +@PluginManager.registerTo("FileRequest") +class FileRequestPlugin(object): + def actionGetFile(self, params): + stats = super(FileRequestPlugin, self).actionGetFile(params) + self.recordFileRequest(params["site"], params["inner_path"], stats) + return stats + + def actionStreamFile(self, params): + stats = super(FileRequestPlugin, self).actionStreamFile(params) + self.recordFileRequest(params["site"], params["inner_path"], stats) + return stats + + def recordFileRequest(self, site_address, inner_path, stats): + if not stats: + # Only track the last request of files + return False + site_id = ContentDbPlugin.content_db.site_ids[site_address] + if site_id and ContentDbPlugin.content_db.isOptionalFile(site_id, inner_path): + request_log[site_id][inner_path] += stats["bytes_sent"] + + +@PluginManager.registerTo("Site") +class SitePlugin(object): + def isDownloadable(self, inner_path): + is_downloadable = super(SitePlugin, self).isDownloadable(inner_path) + if is_downloadable: + return is_downloadable + + for path in self.settings.get("optional_help", {}).iterkeys(): + if inner_path.startswith(path): + return True + + return False + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("OptionalManager plugin") + group.add_argument('--optional_limit', help='Limit total size of optional files', default="10%", metavar="GB or free space %") + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py new file mode 100644 index 00000000..a61aa078 --- /dev/null +++ b/plugins/OptionalManager/Test/TestOptionalManager.py @@ -0,0 +1,42 @@ +import hashlib +import os +import copy + +import pytest + +from OptionalManager import OptionalManagerPlugin +from util import helper + + +@pytest.mark.usefixtures("resetSettings") +class TestOptionalManager: + def 
testDbFill(self, site): + contents = site.content_manager.contents + assert len(site.content_manager.hashfield) > 0 + assert contents.db.execute("SELECT COUNT(*) FROM file_optional WHERE is_downloaded = 1").fetchone()[0] == len(site.content_manager.hashfield) + + def testSetContent(self, site): + contents = site.content_manager.contents + + # Add new file + new_content = copy.deepcopy(contents["content.json"]) + new_content["files_optional"]["testfile"] = { + "size": 1234, + "sha512": "aaaabbbbcccc" + } + num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] + contents["content.json"] = new_content + assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] > num_optional_files_before + + # Remove file + new_content = copy.deepcopy(contents["content.json"]) + del new_content["files_optional"]["testfile"] + num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] + contents["content.json"] = new_content + assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before + + def testDeleteContent(self, site): + contents = site.content_manager.contents + num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] + del contents["content.json"] + assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before diff --git a/plugins/OptionalManager/Test/conftest.py b/plugins/OptionalManager/Test/conftest.py new file mode 100644 index 00000000..8c1df5b2 --- /dev/null +++ b/plugins/OptionalManager/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/OptionalManager/Test/pytest.ini b/plugins/OptionalManager/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/OptionalManager/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. 
\ No newline at end of file diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py new file mode 100644 index 00000000..91f7f6f8 --- /dev/null +++ b/plugins/OptionalManager/UiWebsocketPlugin.py @@ -0,0 +1,260 @@ +import re +import time +import cgi + +import gevent + +from Plugin import PluginManager +from Config import config +from util import helper +from Translate import Translate + +if "_" not in locals(): + _ = Translate("plugins/OptionalManager/languages/") + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def __init__(self, *args, **kwargs): + self.time_peer_numbers_updated = 0 + super(UiWebsocketPlugin, self).__init__(*args, **kwargs) + + def actionFileWrite(self, to, inner_path, *args, **kwargs): + super(UiWebsocketPlugin, self).actionFileWrite(to, inner_path, *args, **kwargs) + + # Add file to content.db and set it as pinned + content_db = self.site.content_manager.contents.db + content_db.my_optional_files[self.site.address + "/" + inner_path] = time.time() + if len(content_db.my_optional_files) > 50: # Keep only last 50 + oldest_key = min( + content_db.my_optional_files.iterkeys(), + key=(lambda key: content_db.my_optional_files[key]) + ) + del content_db.my_optional_files[oldest_key] + + def updatePeerNumbers(self): + content_db = self.site.content_manager.contents.db + content_db.updatePeerNumbers() + self.site.updateWebsocket(peernumber_updated=True) + + # Optional file functions + + def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10): + if not address: + address = self.site.address + + # Update peer numbers if necessary + content_db = self.site.content_manager.contents.db + if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5: + # Start in new thread to avoid blocking + self.time_peer_numbers_updated = time.time() + gevent.spawn(self.updatePeerNumbers) + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + if not all([re.match("^[a-z_*/+-]+( DESC| ASC|)$", part.strip()) for part in orderby.split(",")]): + return self.response(to, "Invalid order_by") + + if type(limit) != int: + return self.response(to, "Invalid limit") + + back = [] + content_db = self.site.content_manager.contents.db + site_id = content_db.site_ids[address] + query = "SELECT * FROM file_optional WHERE site_id = %s AND is_downloaded = 1 ORDER BY %s LIMIT %s" % (site_id, orderby, limit) + for row in content_db.execute(query): + back.append(dict(row)) + self.response(to, back) + + def actionOptionalFileInfo(self, to, inner_path): + content_db = self.site.content_manager.contents.db + site_id = content_db.site_ids[self.site.address] + + # Update peer numbers if necessary + if time.time() - content_db.time_peer_numbers_updated > 60 * 1 and time.time() - self.time_peer_numbers_updated > 60 * 5: + # Start in new thread to avoid blocking + self.time_peer_numbers_updated = time.time() + gevent.spawn(self.updatePeerNumbers) + + query = "SELECT * FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1" + res = content_db.execute(query, {"site_id": site_id, "inner_path": inner_path}) + row = next(res, None) + if row: + self.response(to, dict(row)) + else: + self.response(to, None) + + def setPin(self, inner_path, is_pinned, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return {"error": "Forbidden"} + + 
site = self.server.sites[address] + + content_db = site.content_manager.contents.db + site_id = content_db.site_ids[site.address] + content_db.execute("UPDATE file_optional SET is_pinned = %s WHERE ?" % is_pinned, {"site_id": site_id, "inner_path": inner_path}) + + return "ok" + + def actionOptionalFilePin(self, to, inner_path, address=None): + back = self.setPin(inner_path, 1, address) + if back == "ok": + self.cmd("notification", ["done", _["Pinned %s files"] % len(inner_path) if type(inner_path) is list else 1, 5000]) + self.response(to, back) + + def actionOptionalFileUnpin(self, to, inner_path, address=None): + back = self.setPin(inner_path, 0, address) + if back == "ok": + self.cmd("notification", ["done", _["Removed pin from %s files"] % len(inner_path) if type(inner_path) is list else 1, 5000]) + self.response(to, back) + + def actionOptionalFileDelete(self, to, inner_path, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + + content_db = site.content_manager.contents.db + site_id = content_db.site_ids[site.address] + + res = content_db.execute("SELECT * FROM file_optional WHERE ? LIMIT 1", {"site_id": site_id, "inner_path": inner_path}) + row = next(res, None) + + if not row: + return self.response(to, {"error": "Not found in content.db"}) + + removed = site.content_manager.optionalRemove(inner_path, row["hash_id"], row["size"]) + # if not removed: + # return self.response(to, {"error": "Not found in hash_id: %s" % row["hash_id"]}) + + content_db.execute("UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE ?", {"site_id": site_id, "inner_path": inner_path}) + + try: + site.storage.delete(inner_path) + except Exception, err: + return self.response(to, {"error": "File delete error: %s" % err}) + + self.response(to, "ok") + + + # Limit functions + + def actionOptionalLimitStats(self, to): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, "Forbidden") + + back = {} + back["limit"] = config.optional_limit + back["used"] = self.site.content_manager.contents.db.execute( + "SELECT SUM(size) FROM file_optional WHERE is_downloaded = 1 AND is_pinned = 0" + ).fetchone()[0] + back["free"] = helper.getFreeSpace() + + self.response(to, back) + + def actionOptionalLimitSet(self, to, limit): + if "ADMIN" not in self.site.settings["permissions"]: + return self.response(to, {"error": "Forbidden"}) + config.optional_limit = re.sub("\.0+$", "", limit) # Remove unnecessary digits from end + config.saveValue("optional_limit", limit) + self.response(to, "ok") + + # Distribute help functions + + def actionOptionalHelpList(self, to, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + + self.response(to, site.settings.get("optional_help", {})) + + def actionOptionalHelp(self, to, directory, title, address=None): + if not address: + address = self.site.address + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) + + site = self.server.sites[address] + content_db = site.content_manager.contents.db + site_id = content_db.site_ids[address] + + if "optional_help" not in site.settings: + site.settings["optional_help"] = {} + + stats = content_db.execute( + "SELECT COUNT(*) AS num, SUM(size) AS size FROM file_optional WHERE 
site_id = :site_id AND inner_path LIKE :inner_path",
+            {"site_id": site_id, "inner_path": directory + "%"}
+        ).fetchone()
+        stats = dict(stats)
+
+        if not stats["size"]:
+            stats["size"] = 0
+        if not stats["num"]:
+            stats["num"] = 0
+
+        self.cmd("notification", [
+            "done",
+            _["You started to help distribute %s.<br>Directory: %s"] %
+            (cgi.escape(title), cgi.escape(directory)),
+            10000
+        ])
+
+        site.settings["optional_help"][directory] = title
+
+        self.response(to, dict(stats))
+
+    def actionOptionalHelpRemove(self, to, directory, address=None):
+        if not address:
+            address = self.site.address
+
+        if not self.hasSitePermission(address):
+            return self.response(to, {"error": "Forbidden"})
+
+        site = self.server.sites[address]
+
+        try:
+            del site.settings["optional_help"][directory]
+            self.response(to, "ok")
+        except Exception:
+            self.response(to, {"error": "Not found"})
+
+    def cbOptionalHelpAll(self, to, site, value):
+        site.settings["autodownloadoptional"] = value
+        self.response(to, value)
+
+    def actionOptionalHelpAll(self, to, value, address=None):
+        if not address:
+            address = self.site.address
+
+        if not self.hasSitePermission(address):
+            return self.response(to, {"error": "Forbidden"})
+
+        site = self.server.sites[address]
+
+        if value:
+            if "ADMIN" in self.site.settings["permissions"]:
+                self.cbOptionalHelpAll(to, site, True)
+            else:
+                site_title = site.content_manager.contents["content.json"].get("title", address)
+                self.cmd(
+                    "confirm",
+                    [
+                        _["Help distribute all new optional files on site %s"] % cgi.escape(site_title),
+                        _["Yes, I want to help!"]
+                    ],
+                    lambda (res): self.cbOptionalHelpAll(to, site, True)
+                )
+        else:
+            site.settings["autodownloadoptional"] = False
+            self.response(to, False)
diff --git a/plugins/OptionalManager/__init__.py b/plugins/OptionalManager/__init__.py
new file mode 100644
index 00000000..02969bba
--- /dev/null
+++ b/plugins/OptionalManager/__init__.py
@@ -0,0 +1 @@
+import OptionalManagerPlugin
\ No newline at end of file
diff --git a/plugins/OptionalManager/languages/fr.json b/plugins/OptionalManager/languages/fr.json
new file mode 100644
index 00000000..47a563dc
--- /dev/null
+++ b/plugins/OptionalManager/languages/fr.json
@@ -0,0 +1,7 @@
+{
+  "Pinned %s files": "Fichiers %s épinglés",
+  "Removed pin from %s files": "Fichiers %s ne sont plus épinglés",
+  "You started to help distribute %s.<br>Directory: %s": "Vous avez commencé à aider à distribuer %s.<br>Dossier : %s",
+  "Help distribute all new optional files on site %s": "Aider à distribuer tous les fichiers optionnels du site %s",
+  "Yes, I want to help!": "Oui, je veux aider !"
+}
diff --git a/plugins/OptionalManager/languages/hu.json b/plugins/OptionalManager/languages/hu.json
new file mode 100644
index 00000000..7a23b86c
--- /dev/null
+++ b/plugins/OptionalManager/languages/hu.json
@@ -0,0 +1,7 @@
+{
+  "Pinned %s files": "%s fájl rögzítve",
+  "Removed pin from %s files": "%s fájl rögzítés eltávolítva",
+  "You started to help distribute %s.<br>Directory: %s": "Új segítség a terjesztésben: %s.<br>Könyvtár: %s",
+  "Help distribute all new optional files on site %s": "Segítség az összes új opcionális fájl terjesztésében az %s oldalon",
+  "Yes, I want to help!": "Igen, segíteni akarok!"
+}
diff --git a/plugins/OptionalManager/languages/pt-br.json b/plugins/OptionalManager/languages/pt-br.json
new file mode 100644
index 00000000..21d90cc0
--- /dev/null
+++ b/plugins/OptionalManager/languages/pt-br.json
@@ -0,0 +1,7 @@
+{
+  "Pinned %s files": "Arquivos %s fixados",
+  "Removed pin from %s files": "Arquivos %s não estão fixados",
+  "You started to help distribute %s.<br>Directory: %s": "Você começou a ajudar a distribuir %s.<br>Pasta: %s",
+  "Help distribute all new optional files on site %s": "Ajude a distribuir todos os novos arquivos opcionais no site %s",
+  "Yes, I want to help!": "Sim, eu quero ajudar!"
+}
diff --git a/plugins/OptionalManager/languages/zh-tw.json b/plugins/OptionalManager/languages/zh-tw.json
new file mode 100644
index 00000000..dfa9eaf3
--- /dev/null
+++ b/plugins/OptionalManager/languages/zh-tw.json
@@ -0,0 +1,7 @@
+{
+  "Pinned %s files": "已固定 %s 個檔",
+  "Removed pin from %s files": "已解除固定 %s 個檔",
+  "You started to help distribute %s.<br>Directory: %s": "你已經開始幫助分發 %s<br>目錄:%s",
+  "Help distribute all new optional files on site %s": "你想要幫助分發 %s 網站的所有檔嗎?",
+  "Yes, I want to help!": "是,我想要幫助!"
+}
diff --git a/plugins/OptionalManager/languages/zh.json b/plugins/OptionalManager/languages/zh.json
new file mode 100644
index 00000000..bb9161f5
--- /dev/null
+++ b/plugins/OptionalManager/languages/zh.json
@@ -0,0 +1,7 @@
+{
+  "Pinned %s files": "已固定 %s 个文件",
+  "Removed pin from %s files": "已解除固定 %s 个文件",
+  "You started to help distribute %s.<br>Directory: %s": "你已经开始帮助分发 %s<br>目录:%s",
+  "Help distribute all new optional files on site %s": "你想要帮助分发 %s 站点的所有文件吗?",
+  "Yes, I want to help!": "是,我想要帮助!"
+}
diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py
new file mode 100644
index 00000000..858b22ea
--- /dev/null
+++ b/plugins/PeerDb/PeerDbPlugin.py
@@ -0,0 +1,94 @@
+import time
+import sqlite3
+import random
+import atexit
+
+import gevent
+from Plugin import PluginManager
+
+
+@PluginManager.registerTo("ContentDb")
+class ContentDbPlugin(object):
+    def __init__(self, *args, **kwargs):
+        atexit.register(self.saveAllPeers)
+        super(ContentDbPlugin, self).__init__(*args, **kwargs)
+
+    def getSchema(self):
+        schema = super(ContentDbPlugin, self).getSchema()
+
+        schema["tables"]["peer"] = {
+            "cols": [
+                ["site_id", "INTEGER REFERENCES site (site_id) ON DELETE CASCADE"],
+                ["address", "TEXT NOT NULL"],
+                ["port", "INTEGER NOT NULL"],
+                ["hashfield", "BLOB"],
+                ["time_added", "INTEGER NOT NULL"]
+            ],
+            "indexes": [
+                "CREATE UNIQUE INDEX peer_key ON peer (site_id, address, port)"
+            ],
+            "schema_changed": 1
+        }
+
+        return schema
+
+    def loadPeers(self, site):
+        s = time.time()
+        site_id = self.site_ids.get(site.address)
+        res = self.execute("SELECT * FROM peer WHERE site_id = :site_id", {"site_id": site_id})
+        num = 0
+        num_hashfield = 0
+        for row in res:
+            peer = site.addPeer(row["address"], row["port"])
+            if not peer:  # Already exists
+                continue
+            if row["hashfield"]:
+                peer.hashfield.replaceFromString(row["hashfield"])
+                num_hashfield += 1
+            peer.time_added = row["time_added"]
+            peer.reputation = int((time.time() - peer.time_added) / (60 * 60 * 24))  # Boost reputation for older peers
+            if row["address"].endswith(".onion"):
+                peer.reputation = peer.reputation / 2  # Onion peers are less likely to be working
+            num += 1
+        site.log.debug("%s peers (%s with hashfield) loaded in %.3fs" % (num, num_hashfield, time.time() - s))
+
+    def iteratePeers(self, site):
+        site_id = self.site_ids.get(site.address)
+        for key, peer in site.peers.iteritems():
+            address, port = key.split(":")
+            if peer.has_hashfield:
+                hashfield = sqlite3.Binary(peer.hashfield.tostring())
+            else:
+                hashfield = ""
+            yield (site_id, address, port, hashfield, int(peer.time_added))
+
+    def savePeers(self, site, spawn=False):
+        if spawn:
+            # Save peers every hour (+ a few random seconds to avoid updating every site at the same time)
+            gevent.spawn_later(60 * 60 + random.randint(0, 60), self.savePeers, site, spawn=True)
+        if not site.peers:
+            site.log.debug("Peers not saved: No peers found")
+            return
+        s = time.time()
+        site_id = self.site_ids.get(site.address)
+        cur = self.getCursor()
+        cur.execute("BEGIN")
+        self.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id})
+        self.cur.cursor.executemany(
+            "INSERT INTO peer (site_id, address, port, hashfield, time_added) VALUES (?, ?, ?, ?, ?)",
+            self.iteratePeers(site)
+        )
+        cur.execute("END")
+        site.log.debug("Peers saved in %.3fs" % (time.time() - s))
+
+    def initSite(self, site):
+        super(ContentDbPlugin, self).initSite(site)
+        gevent.spawn_later(0.5, self.loadPeers, site)
+        gevent.spawn_later(60 * 60, self.savePeers, site, spawn=True)
+
+    def saveAllPeers(self):
+        for site in self.sites.values():
+            try:
+                self.savePeers(site)
+            except Exception, err:
+                site.log.error("Save peer error: %s" % err)
diff --git a/plugins/PeerDb/__init__.py b/plugins/PeerDb/__init__.py
new file mode 100644
index 00000000..967561dc
--- /dev/null
+++ b/plugins/PeerDb/__init__.py
@@ -0,0 +1,2 @@
+import PeerDbPlugin
+
diff --git a/plugins/Sidebar/SidebarPlugin.py 
b/plugins/Sidebar/SidebarPlugin.py new file mode 100644 index 00000000..97d8f521 --- /dev/null +++ b/plugins/Sidebar/SidebarPlugin.py @@ -0,0 +1,605 @@ +import re +import os +import cgi +import sys +import math +import time +import json +try: + import cStringIO as StringIO +except: + import StringIO + +import gevent + +from Config import config +from Plugin import PluginManager +from Debug import Debug +from Translate import Translate +from util import helper + +plugin_dir = "plugins/Sidebar" +media_dir = plugin_dir + "/media" +sys.path.append(plugin_dir) # To able to load geoip lib + +loc_cache = {} +if "_" not in locals(): + _ = Translate(plugin_dir + "/languages/") + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Inject our resources to end of original file streams + def actionUiMedia(self, path): + if path == "/uimedia/all.js" or path == "/uimedia/all.css": + # First yield the original file and header + body_generator = super(UiRequestPlugin, self).actionUiMedia(path) + for part in body_generator: + yield part + + # Append our media file to the end + ext = re.match(".*(js|css)$", path).group(1) + plugin_media_file = "%s/all.%s" % (media_dir, ext) + if config.debug: + # If debugging merge *.css to all.css and *.js to all.js + from Debug import DebugMedia + DebugMedia.merge(plugin_media_file) + if ext == "js": + yield _.translateData(open(plugin_media_file).read()) + else: + for part in self.actionFile(plugin_media_file, send_header=False): + yield part + elif path.startswith("/uimedia/globe/"): # Serve WebGL globe files + file_name = re.match(".*/(.*)", path).group(1) + plugin_media_file = "%s-globe/%s" % (media_dir, file_name) + if config.debug and path.endswith("all.js"): + # If debugging merge *.css to all.css and *.js to all.js + from Debug import DebugMedia + DebugMedia.merge(plugin_media_file) + for part in self.actionFile(plugin_media_file): + yield part + else: + for part in super(UiRequestPlugin, self).actionUiMedia(path): + yield part + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def sidebarRenderPeerStats(self, body, site): + connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]) + connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")]) + onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id]) + peers_total = len(site.peers) + if peers_total: + percent_connected = float(connected) / peers_total + percent_connectable = float(connectable) / peers_total + percent_onion = float(onion) / peers_total + else: + percent_connectable = percent_connected = percent_onion = 0 + + body.append(_(u""" +
            <li>
+             <!-- peer graph markup stripped in extraction; bar widths came from percent_connected, percent_connectable and percent_onion -->
+             <ul>
+              <li>{_[Connected]}:{connected}</li>
+              <li>{_[Connectable]}:{connectable}</li>
+              <li>{_[Onion]}:{onion}</li>
+              <li>{_[Total]}:{peers_total}</li>
+             </ul>
+            </li>
+        """))
+
+    def sidebarRenderTransferStats(self, body, site):
+        recv = float(site.settings.get("bytes_recv", 0)) / 1024 / 1024
+        sent = float(site.settings.get("bytes_sent", 0)) / 1024 / 1024
+        transfer_total = recv + sent
+        if transfer_total:
+            percent_recv = recv / transfer_total
+            percent_sent = sent / transfer_total
+        else:
+            percent_recv = 0.5
+            percent_sent = 0.5
+
+        body.append(_(u"""
+            <li>
+             <!-- transfer graph markup stripped in extraction; bar widths came from percent_recv and percent_sent -->
+             <ul>
+              <li>{_[Received]}:{recv:.2f}MB</li>
+              <li>{_[Sent]}:{sent:.2f}MB</li>
+             </ul>
+            </li>
+        """))
+    def sidebarRenderFileStats(self, body, site):
+        body.append(_(u"<li><label>{_[Files]}</label><ul class='graph graph-stacked'>"))
+
+        extensions = (
+            ("html", "yellow"),
+            ("css", "orange"),
+            ("js", "purple"),
+            ("Image", "green"),
+            ("json", "darkblue"),
+            ("User data", "blue"),
+            ("Other", "white"),
+            ("Total", "black")
+        )
+        # Collect stats
+        size_filetypes = {}
+        size_total = 0
+        contents = site.content_manager.listContents()  # Without user files
+        for inner_path in contents:
+            content = site.content_manager.contents[inner_path]
+            if "files" not in content:
+                continue
+            for file_name, file_details in content["files"].items():
+                size_total += file_details["size"]
+                ext = file_name.split(".")[-1]
+                size_filetypes[ext] = size_filetypes.get(ext, 0) + file_details["size"]
+
+        # Get user file sizes
+        size_user_content = site.content_manager.contents.execute(
+            "SELECT SUM(size) + SUM(size_files) AS size FROM content WHERE ?",
+            {"not__inner_path": contents}
+        ).fetchone()["size"]
+        if not size_user_content:
+            size_user_content = 0
+        size_filetypes["User data"] = size_user_content
+        size_total += size_user_content
+
+        # The missing difference is the size of the content.json files themselves
+        if "json" in size_filetypes:
+            size_filetypes["json"] += max(0, site.settings["size"] - size_total)
+        size_total = size_other = site.settings["size"]
+
+        # Bar
+        for extension, color in extensions:
+            if extension == "Total":
+                continue
+            if extension == "Other":
+                size = max(0, size_other)
+            elif extension == "Image":
+                size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0)
+                size_other -= size
+            else:
+                size = size_filetypes.get(extension, 0)
+                size_other -= size
+            if size_total == 0:
+                percent = 0
+            else:
+                percent = 100 * (float(size) / size_total)
+            percent = math.floor(percent * 100) / 100  # Floor to 2 digits
+            body.append(
+                u"""<li style='width: %.2f%%' class='%s back-%s' title="%s"></li>""" %
+                (percent, _[extension], color, _[extension])
+            )
+
+        # Legend
+        body.append("</ul><ul class='graph-legend'>")
+        for extension, color in extensions:
+            if extension == "Other":
+                size = max(0, size_other)
+            elif extension == "Image":
+                size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0)
+            elif extension == "Total":
+                size = size_total
+            else:
+                size = size_filetypes.get(extension, 0)
+
+            if extension == "js":
+                title = "javascript"
+            else:
+                title = extension
+
+            if size > 1024 * 1024 * 10:  # Format as MB if larger than 10MB
+                size_formatted = "%.0fMB" % (size / 1024 / 1024)
+            else:
+                size_formatted = "%.0fkB" % (size / 1024)
+
+            body.append(u"<li class='color-%s'><span>%s:</span><b>%s</b></li>" % (color, _[title], size_formatted))
+
+        body.append("</ul></li>")
+
+    def sidebarRenderSizeLimit(self, body, site):
+        free_space = helper.getFreeSpace() / 1024 / 1024
+        size = float(site.settings["size"]) / 1024 / 1024
+        size_limit = site.getSizeLimit()
+        percent_used = size / size_limit
+
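+        # Size limit row: percentage used plus an editable MB limit field.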
+        body.append(_(u"""
+            <li>
+             <label>{_[Size limit]} <small>({_[limit used]}: {percent_used:.0%}, {_[free space]}: {free_space:,d}MB)</small></label>
+             <input type='text' class='text text-num' value="{size_limit}" id='input-sitelimit'/><span class='text-post'>MB</span>
+             <a href='#Set' class='button' id='button-sitelimit'>{_[Set]}</a>
+            </li>
+        """))
+
+    def sidebarRenderOptionalFileStats(self, body, site):
+        size_total = float(site.settings["size_optional"])
+        size_downloaded = float(site.settings["optional_downloaded"])
+
+        if not size_total:
+            return False
+
+        percent_downloaded = size_downloaded / size_total
+
+        size_formatted_total = size_total / 1024 / 1024
+        size_formatted_downloaded = size_downloaded / 1024 / 1024
+
+        body.append(_(u"""
+            <li>
+             <label>{_[Optional files]}</label>
+             <ul class='graph'>
+              <li style='width: 100%' class='total back-black' title="{_[Total size]}"></li>
+              <li style='width: {percent_downloaded:.0%}' class='connected back-green' title='{_[Downloaded files]}'></li>
+             </ul>
+             <ul class='graph-legend'>
+              <li class='color-green'><span>{_[Downloaded]}:</span><b>{size_formatted_downloaded:.2f}MB</b></li>
+              <li class='color-black'><span>{_[Total]}:</span><b>{size_formatted_total:.2f}MB</b></li>
+             </ul>
+            </li>
+        """))
+
+        return True
+
+    def sidebarRenderOptionalFileSettings(self, body, site):
+        if self.site.settings.get("autodownloadoptional"):
+            checked = "checked='checked'"
+        else:
+            checked = ""
+
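+        # {checked} pre-ticks the checkbox when optional-file auto-download
+        # is already enabled for this site.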
+        body.append(_(u"""
+            <li>
+             <label>{_[Download and help distribute all files]}</label>
+             <input type="checkbox" class="checkbox" id="checkbox-autodownloadoptional" {checked}/><div class="checkbox-skin"></div>
+            </li>
+        """))
+
+    def sidebarRenderBadFiles(self, body, site):
+        body.append(_(u"""
+            <li>
+             <label>{_[Missing files]}</label>
+             <ul class='filelist'>
+        """))
+
+        i = 0
+        for bad_file, tries in site.bad_files.iteritems():
+            i += 1
+            body.append(_(u"""<li class='color-red' title="{bad_file} ({tries})">{bad_file}</li>""", {
+                "bad_file": cgi.escape(bad_file, True), "tries": _.pluralize(tries, "{} try", "{} tries")
+            }))
+            if i > 30:
+                break
+
+        if len(site.bad_files) > 30:
+            num_bad_files = len(site.bad_files) - 30
+            body.append(_(u"""<li class='color-red'>{_[+ {num_bad_files} more]}</li>""", nested=True))
+
+        body.append("""
+             </ul>
+            </li>
+        """)
+
+    def sidebarRenderDbOptions(self, body, site):
+        if site.storage.db:
+            inner_path = site.storage.getInnerPath(site.storage.db.db_path)
+            size = float(site.storage.getSize(inner_path)) / 1024
+            feeds = len(site.storage.db.schema.get("feeds", {}))
+        else:
+            inner_path = _[u"No database found"]
+            size = 0.0
+            feeds = 0
+
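+        # Database row: db file path and size, with Reload/Rebuild actions.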
+        body.append(_(u"""
+            <li>
+             <label>{_[Database]} <small>({size:.2f}kB, {_[search feeds]}: {_[{feeds} query]})</small></label>
+             <div class='flex'>
+              <input type='text' class='text text-full' value="{inner_path}" disabled='disabled'/>
+              <a href='#Reload' id="button-dbreload" class='button'>{_[Reload]}</a>
+              <a href='#Rebuild' id="button-dbrebuild" class='button'>{_[Rebuild]}</a>
+             </div>
+            </li>
+        """, nested=True))
+
+    def sidebarRenderIdentity(self, body, site):
+        auth_address = self.user.getAuthAddress(self.site.address)
+        rules = self.site.content_manager.getRules("data/users/%s/content.json" % auth_address)
+        if rules and rules.get("max_size"):
+            quota = rules["max_size"] / 1024
+            try:
+                content = site.content_manager.contents["data/users/%s/content.json" % auth_address]
+                used = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()])
+            except:
+                used = 0
+            used = used / 1024
+        else:
+            quota = used = 0
+
+        body.append(_(u"""
+            <li>
+             <label>{_[Identity address]} <small>({used}kB / {quota}kB)</small></label>
+             <div class='flex'>
+              <span class='input text disabled'>{auth_address}</span>
+              <a href='#Change' class='button' id='button-identity'>{_[Change]}</a>
+             </div>
+            </li>
+        """))
+
+    def sidebarRenderControls(self, body, site):
+        auth_address = self.user.getAuthAddress(self.site.address)
+        if self.site.settings["serving"]:
+            class_pause = ""
+            class_resume = "hidden"
+        else:
+            class_pause = "hidden"
+            class_resume = ""
+
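+        # Pause and Resume are both rendered; the inactive one is hidden via
+        # its class so the client can toggle them without a re-render.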
+        body.append(_(u"""
+            <li>
+             <label>{_[Site control]}</label>
+             <a href='#Update' class='button' id='button-update'>{_[Update]}</a>
+             <a href='#Pause' class='button {class_pause}' id='button-pause'>{_[Pause]}</a>
+             <a href='#Resume' class='button {class_resume}' id='button-resume'>{_[Resume]}</a>
+             <a href='#Delete' class='button' id='button-delete'>{_[Delete]}</a>
+            </li>
+        """))
+
+        site_address = self.site.address
+        body.append(_(u"""
+            <li>
+             <label>{_[Site address]}</label>
+             <div class='flex'>
+              <span class='input text disabled'>{site_address}</span>
+              <a href='bitcoin:{site_address}' class='button' id='button-donate'>{_[Donate]}</a>
+             </div>
+            </li>
+        """))
+
+    def sidebarRenderOwnedCheckbox(self, body, site):
+        if self.site.settings["own"]:
+            checked = "checked='checked'"
+        else:
+            checked = ""
+
+        body.append(_(u"""
+            <h2 class='owned-title'>{_[This is my site]}</h2>
+            <input type="checkbox" class="checkbox" id="checkbox-owned" {checked}/><div class="checkbox-skin"></div>
+        """))
+
+    def sidebarRenderOwnSettings(self, body, site):
+        title = cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True)
+        description = cgi.escape(site.content_manager.contents.get("content.json", {}).get("description", ""), True)
+        privatekey = cgi.escape(self.user.getSiteData(site.address, create=False).get("privatekey", ""))
+
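+        # The values above are HTML-escaped before being interpolated into
+        # the form markup below.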
+        body.append(_(u"""
+            <li>
+             <label for='settings-title'>{_[Site title]}</label>
+             <input type='text' class='text' value="{title}" id='settings-title'/>
+            </li>
+
+            <li>
+             <label for='settings-description'>{_[Site description]}</label>
+             <input type='text' class='text' value="{description}" id='settings-description'/>
+            </li>
+
+            <li style='display: none'>
+             <input type='text' class='text' value="{privatekey}" id='settings-privatekey'/>
+            </li>
+
+            <li>
+             <a href='#Save' class='button' id='button-settings'>{_[Save site settings]}</a>
+            </li>
+        """))
+
+    def sidebarRenderContents(self, body, site):
+        body.append(_(u"""
+            <li>
+             <label>{_[Content publishing]}</label>
+        """))
+
+        # Choose content you want to sign
+        contents = ["content.json"]
+        contents += site.content_manager.contents.get("content.json", {}).get("includes", {}).keys()
+        body.append(_(u"<div class='contents'>{_[Choose]}: "))
+        for content in contents:
+            content = cgi.escape(content, True)
+            body.append(_("<a href='{content}' class='contents-content'>{content}</a> "))
+        body.append("</div>")
+
+        body.append(_(u"""
+             <a href='#Sign' class='button' id='button-sign'>{_[Sign]}</a>
+             <a href='#Publish' class='button' id='button-publish'>{_[Publish]}</a>
+            </li>
+        """))
+
+    def actionSidebarGetHtmlTag(self, to):
+        site = self.site
+
+        body = []
+
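+        # Assemble the whole sidebar as a single HTML fragment: title, globe
+        # placeholder, then each stats/settings section in order.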
    ") + body.append("

    %s

    " % cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True)) + + body.append("
    ") + + body.append("
      ") + + self.sidebarRenderPeerStats(body, site) + self.sidebarRenderTransferStats(body, site) + self.sidebarRenderFileStats(body, site) + self.sidebarRenderSizeLimit(body, site) + has_optional = self.sidebarRenderOptionalFileStats(body, site) + if has_optional: + self.sidebarRenderOptionalFileSettings(body, site) + self.sidebarRenderDbOptions(body, site) + self.sidebarRenderIdentity(body, site) + self.sidebarRenderControls(body, site) + if site.bad_files: + self.sidebarRenderBadFiles(body, site) + + self.sidebarRenderOwnedCheckbox(body, site) + body.append("
      ") + self.sidebarRenderOwnSettings(body, site) + self.sidebarRenderContents(body, site) + body.append("
      ") + body.append("
    ") + body.append("
    ") + + self.response(to, "".join(body)) + + def downloadGeoLiteDb(self, db_path): + import urllib + import gzip + import shutil + from util import helper + + self.log.info("Downloading GeoLite2 City database...") + self.cmd("notification", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], 0]) + db_urls = [ + "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", + "https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz" + ] + for db_url in db_urls: + try: + # Download + response = helper.httpRequest(db_url) + + data = StringIO.StringIO() + while True: + buff = response.read(1024 * 512) + if not buff: + break + data.write(buff) + self.log.info("GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell()) + data.seek(0) + + # Unpack + with gzip.GzipFile(fileobj=data) as gzip_file: + shutil.copyfileobj(gzip_file, open(db_path, "wb")) + + self.cmd("notification", ["geolite-done", _["GeoLite2 City database downloaded!"], 5000]) + time.sleep(2) # Wait for notify animation + return True + except Exception, err: + self.log.error("Error downloading %s: %s" % (db_url, err)) + pass + self.cmd("notification", [ + "geolite-error", + _["GeoLite2 City database download error: {}!
Please download manually and unpack to data dir:<br>
    {}"].format(err, db_urls[0]), + 0 + ]) + + def actionSidebarGetPeers(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + try: + import maxminddb + db_path = config.data_dir + '/GeoLite2-City.mmdb' + if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: + if not self.downloadGeoLiteDb(db_path): + return False + geodb = maxminddb.open_database(db_path) + + peers = self.site.peers.values() + # Find avg ping + ping_times = [ + peer.connection.last_ping_delay + for peer in peers + if peer.connection and peer.connection.last_ping_delay and peer.connection.last_ping_delay + ] + if ping_times: + ping_avg = sum(ping_times) / float(len(ping_times)) + else: + ping_avg = 0 + # Place bars + globe_data = [] + placed = {} # Already placed bars here + for peer in peers: + # Height of bar + if peer.connection and peer.connection.last_ping_delay: + ping = min(0.20, math.log(1 + peer.connection.last_ping_delay / ping_avg, 300)) + else: + ping = -0.03 + + # Query and cache location + if peer.ip in loc_cache: + loc = loc_cache[peer.ip] + else: + try: + loc = geodb.get(peer.ip) + except: + loc = None + loc_cache[peer.ip] = loc + if not loc or "location" not in loc: + continue + + # Create position array + lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"]) + latlon = "%s,%s" % (lat, lon) + if latlon in placed: # Dont place more than 1 bar to same place, fake repos using ip address last two part + lat += float(128 - int(peer.ip.split(".")[-2])) / 50 + lon += float(128 - int(peer.ip.split(".")[-1])) / 50 + latlon = "%s,%s" % (lat, lon) + placed[latlon] = True + + globe_data += (lat, lon, ping) + # Append myself + loc = geodb.get(config.ip_external) + if loc and loc.get("location"): + lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"]) + globe_data += (lat, lon, -0.135) + + self.response(to, globe_data) + except Exception, err: + self.log.debug("sidebarGetPeers error: %s" % Debug.formatException(err)) + self.response(to, {"error": err}) + + def actionSiteSetOwned(self, to, owned): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.settings["own"] = bool(owned) + + def actionSiteSetAutodownloadoptional(self, to, owned): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.settings["autodownloadoptional"] = bool(owned) + self.site.bad_files = {} + gevent.spawn(self.site.update, check_files=True) + self.site.worker_manager.removeGoodFileTasks() + + def actionDbReload(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.storage.closeDb() + self.site.storage.getDb() + + return self.response(to, "ok") + + def actionDbRebuild(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.storage.rebuildDb() + + return self.response(to, "ok") diff --git a/plugins/Sidebar/__init__.py b/plugins/Sidebar/__init__.py new file mode 100644 index 00000000..8b61cb4a --- /dev/null +++ b/plugins/Sidebar/__init__.py @@ -0,0 +1 @@ +import SidebarPlugin \ No newline at end of file diff --git a/plugins/Sidebar/languages/da.json 
b/plugins/Sidebar/languages/da.json new file mode 100644 index 00000000..a421292c --- /dev/null +++ b/plugins/Sidebar/languages/da.json @@ -0,0 +1,81 @@ +{ + "Peers": "Klienter", + "Connected": "Forbundet", + "Connectable": "Mulige", + "Connectable peers": "Mulige klienter", + + "Data transfer": "Data overførsel", + "Received": "Modtaget", + "Received bytes": "Bytes modtaget", + "Sent": "Sendt", + "Sent bytes": "Bytes sendt", + + "Files": "Filer", + "Total": "I alt", + "Image": "Image", + "Other": "Andet", + "User data": "Bruger data", + + "Size limit": "Side max størrelse", + "limit used": "brugt", + "free space": "fri", + "Set": "Opdater", + + "Optional files": "Valgfri filer", + "Downloaded": "Downloadet", + "Download and help distribute all files": "Download og hjælp med at dele filer", + "Total size": "Størrelse i alt", + "Downloaded files": "Filer downloadet", + + "Database": "Database", + "search feeds": "søgninger", + "{feeds} query": "{feeds} søgninger", + "Reload": "Genindlæs", + "Rebuild": "Genopbyg", + "No database found": "Ingen database fundet", + + "Identity address": "Autorisations ID", + "Change": "Skift", + + "Update": "Opdater", + "Pause": "Pause", + "Resume": "Aktiv", + "Delete": "Slet", + "Are you sure?": "Er du sikker?", + + "Site address": "Side addresse", + "Donate": "Doner penge", + + "Missing files": "Manglende filer", + "{} try": "{} forsøg", + "{} tries": "{} forsøg", + "+ {num_bad_files} more": "+ {num_bad_files} mere", + + "This is my site": "Dette er min side", + "Site title": "Side navn", + "Site description": "Side beskrivelse", + "Save site settings": "Gem side opsætning", + + "Content publishing": "Indhold offentliggøres", + "Choose": "Vælg", + "Sign": "Signer", + "Publish": "Offentliggør", + + "This function is disabled on this proxy": "Denne funktion er slået fra på denne ZeroNet proxyEz a funkció ki van kapcsolva ezen a proxy-n", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "GeoLite2 City database kunne ikke downloades: {}!<br>Download venligst databasen manuelt og udpak i data folder:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 City database downloadet!", + + "Are you sure?": "Er du sikker?", + "Site storage limit modified!": "Side max størrelse ændret!", + "Database schema reloaded!": "Database definition genindlæst!", + "Database rebuilding....": "Genopbygger database...", + "Database rebuilt!": "Database genopbygget!", + "Site updated!": "Side opdateret!", + "Delete this site": "Slet denne side", + "File write error: ": "Fejl ved skrivning af fil: ", + "Site settings saved!": "Side opsætning gemt!", + "Enter your private key:": "Indtast din private nøgle:", + " Signed!": " Signeret!", + "WebGL not supported": "WebGL er ikke supporteret" +} \ No newline at end of file diff --git a/plugins/Sidebar/languages/de.json b/plugins/Sidebar/languages/de.json new file mode 100644 index 00000000..2f5feacd --- /dev/null +++ b/plugins/Sidebar/languages/de.json @@ -0,0 +1,81 @@ +{ + "Peers": "Peers", + "Connected": "Verbunden", + "Connectable": "Verbindbar", + "Connectable peers": "Verbindbare Peers", + + "Data transfer": "Datei Transfer", + "Received": "Empfangen", + "Received bytes": "Empfangene Bytes", + "Sent": "Gesendet", + "Sent bytes": "Gesendete Bytes", + + "Files": "Dateien", + "Total": "Gesamt", + "Image": "Bilder", + "Other": "Sonstiges", + "User data": "Nutzer Daten", + + "Size limit": "Speicher Limit", + "limit used": "Limit benutzt", + "free space": "freier Speicher", + "Set": "Setzten", + + "Optional files": "Optionale Dateien", + "Downloaded": "Heruntergeladen", + "Download and help distribute all files": "Herunterladen und helfen alle Dateien zu verteilen", + "Total size": "Gesamte Größe", + "Downloaded files": "Heruntergeladene Dateien", + + "Database": "Datenbank", + "search feeds": "Feeds durchsuchen", + "{feeds} query": "{feeds} Abfrage", + "Reload": "Neu laden", + "Rebuild": "Neu bauen", + "No database found": "Keine Datenbank gefunden", + + "Identity address": "Identitäts Adresse", + "Change": "Ändern", + + "Update": "Aktualisieren", + "Pause": "Pausieren", + "Resume": "Fortsetzen", + "Delete": "Löschen", + "Are you sure?": "Bist du sicher?", + + "Site address": "Seiten Adresse", + "Donate": "Spenden", + + "Missing files": "Fehlende Dateien", + "{} try": "{} versuch", + "{} tries": "{} versuche", + "+ {num_bad_files} more": "+ {num_bad_files} mehr", + + "This is my site": "Das ist meine Seite", + "Site title": "Seiten Titel", + "Site description": "Seiten Beschreibung", + "Save site settings": "Einstellungen der Seite speichern", + + "Content publishing": "Inhaltsveröffentlichung", + "Choose": "Wähle", + "Sign": "Signieren", + "Publish": "Veröffentlichen", + + "This function is disabled on this proxy": "Diese Funktion ist auf dieser Proxy deaktiviert", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "GeoLite2 City Datenbank Download Fehler: {}!<br>Bitte manuell herunterladen und die Datei in das Datei Verzeichnis extrahieren:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Herunterladen der GeoLite2 City Datenbank (einmalig, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 City Datenbank heruntergeladen!", + + "Are you sure?": "Bist du sicher?", + "Site storage limit modified!": "Speicher Limit der Seite modifiziert!", + "Database schema reloaded!": "Datebank Schema neu geladen!", + "Database rebuilding....": "Datenbank neu bauen...", + "Database rebuilt!": "Datenbank neu gebaut!", + "Site updated!": "Seite aktualisiert!", + "Delete this site": "Diese Seite löschen", + "File write error: ": "Datei schreib fehler:", + "Site settings saved!": "Seiten Einstellungen gespeichert!", + "Enter your private key:": "Gib deinen privaten Schlüssel ein:", + " Signed!": " Signiert!", + "WebGL not supported": "WebGL nicht unterstützt" +} diff --git a/plugins/Sidebar/languages/es.json b/plugins/Sidebar/languages/es.json new file mode 100644 index 00000000..b9e98c46 --- /dev/null +++ b/plugins/Sidebar/languages/es.json @@ -0,0 +1,79 @@ +{ + "Peers": "Pares", + "Connected": "Conectados", + "Connectable": "Conectables", + "Connectable peers": "Pares conectables", + + "Data transfer": "Transferencia de datos", + "Received": "Recibidos", + "Received bytes": "Bytes recibidos", + "Sent": "Enviados", + "Sent bytes": "Bytes envidados", + + "Files": "Ficheros", + "Total": "Total", + "Image": "Imagen", + "Other": "Otro", + "User data": "Datos del usuario", + + "Size limit": "Límite de tamaño", + "limit used": "Límite utilizado", + "free space": "Espacio libre", + "Set": "Establecer", + + "Optional files": "Ficheros opcionales", + "Downloaded": "Descargado", + "Download and help distribute all files": "Descargar y ayudar a distribuir todos los ficheros", + "Total size": "Tamaño total", + "Downloaded files": "Ficheros descargados", + + "Database": "Base de datos", + "search feeds": "Fuentes de búsqueda", + "{feeds} query": "{feeds} consulta", + "Reload": "Recargar", + "Rebuild": "Reconstruir", + "No database found": "No se ha encontrado la base de datos", + + "Identity address": "Dirección de la identidad", + "Change": "Cambiar", + + "Update": "Actualizar", + "Pause": "Pausar", + "Resume": "Reanudar", + "Delete": "Borrar", + + "Site address": "Dirección del sitio", + "Donate": "Donar", + + "Missing files": "Ficheros perdidos", + "{} try": "{} intento", + "{} tries": "{} intentos", + "+ {num_bad_files} more": "+ {num_bad_files} más", + + "This is my site": "Este es mi sitio", + "Site title": "Título del sitio", + "Site description": "Descripción del sitio", + "Save site settings": "Guardar la configuración del sitio", + + "Content publishing": "Publicación del contenido", + "Choose": "Elegir", + "Sign": "Firmar", + "Publish": "Publicar", + "This function is disabled on this proxy": "Esta función está deshabilitada en este proxy", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "¡Error de la base de datos GeoLite2: {}!<br>Por favor, descárgalo manualmente y descomprime al directorio de datos:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Descargando la base de datos de GeoLite2 (una única vez, ~20MB)...", + "GeoLite2 City database downloaded!": "¡Base de datos de GeoLite2 descargada!", + + "Are you sure?": "¿Estás seguro?", + "Site storage limit modified!": "¡Límite de almacenamiento del sitio modificado!", + "Database schema reloaded!": "¡Esquema de la base de datos recargado!", + "Database rebuilding....": "Reconstruyendo la base de datos...", + "Database rebuilt!": "¡Base de datos reconstruida!", + "Site updated!": "¡Sitio actualizado!", + "Delete this site": "Borrar este sitio", + "File write error: ": "Error de escritura de fichero:", + "Site settings saved!": "¡Configuración del sitio guardada!", + "Enter your private key:": "Introduce tu clave privada:", + " Signed!": " ¡firmado!", + "WebGL not supported": "WebGL no está soportado" +} diff --git a/plugins/Sidebar/languages/fr.json b/plugins/Sidebar/languages/fr.json new file mode 100644 index 00000000..5c4b3ac7 --- /dev/null +++ b/plugins/Sidebar/languages/fr.json @@ -0,0 +1,82 @@ +{ + "Peers": "Pairs", + "Connected": "Connectés", + "Connectable": "Accessibles", + "Connectable peers": "Pairs accessibles", + + "Data transfer": "Données transférées", + "Received": "Reçues", + "Received bytes": "Bytes reçus", + "Sent": "Envoyées", + "Sent bytes": "Bytes envoyés", + + "Files": "Fichiers", + "Total": "Total", + "Image": "Image", + "Other": "Autre", + "User data": "Utilisateurs", + + "Size limit": "Taille maximale", + "limit used": "utlisé", + "free space": "libre", + "Set": "Modifier", + + "Optional files": "Fichiers optionnels", + "Downloaded": "Téléchargé", + "Download and help distribute all files": "Télécharger et distribuer tous les fichiers", + "Total size": "Taille totale", + "Downloaded files": "Fichiers téléchargés", + + "Database": "Base de données", + "search feeds": "recherche", + "{feeds} query": "{feeds} requête", + "Reload": "Recharger", + "Rebuild": "Reconstruire", + "No database found": "Aucune base de données trouvée", + + "Identity address": "Adresse d'identité", + "Change": "Modifier", + + "Site control": "Opérations", + "Update": "Mettre à jour", + "Pause": "Suspendre", + "Resume": "Reprendre", + "Delete": "Supprimer", + "Are you sure?": "Êtes-vous certain?", + + "Site address": "Adresse du site", + "Donate": "Faire un don", + + "Missing files": "Fichiers manquants", + "{} try": "{} essai", + "{} tries": "{} essais", + "+ {num_bad_files} more": "+ {num_bad_files} manquants", + + "This is my site": "Ce site m'appartient", + "Site title": "Nom du site", + "Site description": "Description du site", + "Save site settings": "Enregistrer les paramètres", + + "Content publishing": "Publication du contenu", + "Choose": "Sélectionner", + "Sign": "Signer", + "Publish": "Publier", + + "This function is disabled on this proxy": "Cette fonction est désactivé sur ce proxy", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "Erreur au téléchargement de la base de données GeoLite2: {}!<br>Téléchargez et décompressez dans le dossier data:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Téléchargement de la base de données GeoLite2 (une seule fois, ~20MB)...", + "GeoLite2 City database downloaded!": "Base de données GeoLite2 téléchargée!", + + "Are you sure?": "Êtes-vous certain?", + "Site storage limit modified!": "Taille maximale modifiée!", + "Database schema reloaded!": "Base de données rechargée!", + "Database rebuilding....": "Reconstruction de la base de données...", + "Database rebuilt!": "Base de données reconstruite!", + "Site updated!": "Site mis à jour!", + "Delete this site": "Supprimer ce site", + "File write error: ": "Erreur à l'écriture du fichier: ", + "Site settings saved!": "Paramètres du site enregistrés!", + "Enter your private key:": "Entrez votre clé privée:", + " Signed!": " Signé!", + "WebGL not supported": "WebGL n'est pas supporté" +} diff --git a/plugins/Sidebar/languages/hu.json b/plugins/Sidebar/languages/hu.json new file mode 100644 index 00000000..40ed8fab --- /dev/null +++ b/plugins/Sidebar/languages/hu.json @@ -0,0 +1,82 @@ +{ + "Peers": "Csatlakozási pontok", + "Connected": "Csaltakozva", + "Connectable": "Csatlakozható", + "Connectable peers": "Csatlakozható peer-ek", + + "Data transfer": "Adatátvitel", + "Received": "Fogadott", + "Received bytes": "Fogadott byte-ok", + "Sent": "Küldött", + "Sent bytes": "Küldött byte-ok", + + "Files": "Fájlok", + "Total": "Összesen", + "Image": "Kép", + "Other": "Egyéb", + "User data": "Felh. adat", + + "Size limit": "Méret korlát", + "limit used": "felhasznált", + "free space": "szabad hely", + "Set": "Beállít", + + "Optional files": "Opcionális fájlok", + "Downloaded": "Letöltött", + "Download and help distribute all files": "Minden opcionális fájl letöltése", + "Total size": "Teljes méret", + "Downloaded files": "Letöltve", + + "Database": "Adatbázis", + "search feeds": "Keresés források", + "{feeds} query": "{feeds} lekérdezés", + "Reload": "Újratöltés", + "Rebuild": "Újraépítés", + "No database found": "Adatbázis nem található", + + "Identity address": "Azonosító cím", + "Change": "Módosít", + + "Site control": "Oldal műveletek", + "Update": "Frissít", + "Pause": "Szünteltet", + "Resume": "Folytat", + "Delete": "Töröl", + "Are you sure?": "Biztos vagy benne?", + + "Site address": "Oldal címe", + "Donate": "Támogatás", + + "Missing files": "Hiányzó fájlok", + "{} try": "{} próbálkozás", + "{} tries": "{} próbálkozás", + "+ {num_bad_files} more": "+ még {num_bad_files} darab", + + "This is my site": "Ez az én oldalam", + "Site title": "Oldal neve", + "Site description": "Oldal leírása", + "Save site settings": "Oldal beállítások mentése", + + "Content publishing": "Tartalom publikálás", + "Choose": "Válassz", + "Sign": "Aláírás", + "Publish": "Publikálás", + + "This function is disabled on this proxy": "Ez a funkció ki van kapcsolva ezen a proxy-n", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "GeoLite2 város adatbázis letöltési hiba: {}!<br>A térképhez töltsd le és csomagold ki a data könyvtárba:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 város adatbázis letöltve!", + + "Are you sure?": "Biztos vagy benne?", + "Site storage limit modified!": "Az oldalt méret korlát módosítva!", + "Database schema reloaded!": "Adatbázis séma újratöltve!", + "Database rebuilding....": "Adatbázis újraépítés...", + "Database rebuilt!": "Adatbázis újraépítve!", + "Site updated!": "Az oldal frissítve!", + "Delete this site": "Az oldal törlése", + "File write error: ": "Fájl írási hiba: ", + "Site settings saved!": "Az oldal beállításai elmentve!", + "Enter your private key:": "Add meg a prviát kulcsod:", + " Signed!": " Aláírva!", + "WebGL not supported": "WebGL nem támogatott" +} \ No newline at end of file diff --git a/plugins/Sidebar/languages/it.json b/plugins/Sidebar/languages/it.json new file mode 100644 index 00000000..6aa0969a --- /dev/null +++ b/plugins/Sidebar/languages/it.json @@ -0,0 +1,81 @@ +{ + "Peers": "Peer", + "Connected": "Connessi", + "Connectable": "Collegabili", + "Connectable peers": "Peer collegabili", + + "Data transfer": "Trasferimento dati", + "Received": "Ricevuti", + "Received bytes": "Byte ricevuti", + "Sent": "Inviati", + "Sent bytes": "Byte inviati", + + "Files": "File", + "Total": "Totale", + "Image": "Imagine", + "Other": "Altro", + "User data": "Dati utente", + + "Size limit": "Limite dimensione", + "limit used": "limite usato", + "free space": "spazio libero", + "Set": "Imposta", + + "Optional files": "File facoltativi", + "Downloaded": "Scaricati", + "Download and help distribute all files": "Scarica e aiuta a distribuire tutti i file", + "Total size": "Dimensione totale", + "Downloaded files": "File scaricati", + + "Database": "Database", + "search feeds": "ricerca di feed", + "{feeds} query": "{feeds} interrogazione", + "Reload": "Ricaricare", + "Rebuild": "Ricostruire", + "No database found": "Nessun database trovato", + + "Identity address": "Indirizzo di identità", + "Change": "Cambia", + + "Update": "Aggiorna", + "Pause": "Sospendi", + "Resume": "Riprendi", + "Delete": "Cancella", + "Are you sure?": "Sei sicuro?", + + "Site address": "Indirizzo sito", + "Donate": "Dona", + + "Missing files": "File mancanti", + "{} try": "{} tenta", + "{} tries": "{} prova", + "+ {num_bad_files} more": "+ {num_bad_files} altri", + + "This is my site": "Questo è il mio sito", + "Site title": "Titolo sito", + "Site description": "Descrizione sito", + "Save site settings": "Salva impostazioni sito", + + "Content publishing": "Pubblicazione contenuto", + "Choose": "Scegli", + "Sign": "Firma", + "Publish": "Pubblica", + + "This function is disabled on this proxy": "Questa funzione è disabilitata su questo proxy", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "Errore scaricamento database GeoLite2 City: {}!<br>Si prega di scaricarlo manualmente e spacchetarlo nella cartella dir:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Scaricamento database GeoLite2 City (solo una volta, ~20MB)...", + "GeoLite2 City database downloaded!": "Database GeoLite2 City scaricato!", + + "Are you sure?": "Sei sicuro?", + "Site storage limit modified!": "Limite di archiviazione del sito modificato!", + "Database schema reloaded!": "Schema database ricaricato!", + "Database rebuilding....": "Ricostruzione database...", + "Database rebuilt!": "Database ricostruito!", + "Site updated!": "Sito aggiornato!", + "Delete this site": "Cancella questo sito", + "File write error: ": "Errore scrittura file:", + "Site settings saved!": "Impostazioni sito salvate!", + "Enter your private key:": "Inserisci la tua chiave privata:", + " Signed!": " Firmato!", + "WebGL not supported": "WebGL non supportato" +} diff --git a/plugins/Sidebar/languages/pl.json b/plugins/Sidebar/languages/pl.json new file mode 100644 index 00000000..93268507 --- /dev/null +++ b/plugins/Sidebar/languages/pl.json @@ -0,0 +1,82 @@ +{ + "Peers": "Użytkownicy równorzędni", + "Connected": "Połączony", + "Connectable": "Możliwy do podłączenia", + "Connectable peers": "Połączeni użytkownicy równorzędni", + + "Data transfer": "Transfer danych", + "Received": "Odebrane", + "Received bytes": "Odebrany bajty", + "Sent": "Wysłane", + "Sent bytes": "Wysłane bajty", + + "Files": "Pliki", + "Total": "Sumarycznie", + "Image": "Obraz", + "Other": "Inne", + "User data": "Dane użytkownika", + + "Size limit": "Rozmiar limitu", + "limit used": "zużyty limit", + "free space": "wolna przestrzeń", + "Set": "Ustaw", + + "Optional files": "Pliki opcjonalne", + "Downloaded": "Ściągnięte", + "Download and help distribute all files": "Ściągnij i pomóż rozpowszechniać wszystkie pliki", + "Total size": "Rozmiar sumaryczny", + "Downloaded files": "Ściągnięte pliki", + + "Database": "Baza danych", + "search feeds": "przeszukaj zasoby", + "{feeds} query": "{feeds} pytanie", + "Reload": "Odśwież", + "Rebuild": "Odbuduj", + "No database found": "Nie odnaleziono bazy danych", + + "Identity address": "Adres identyfikacyjny", + "Change": "Zmień", + + "Site control": "Kontrola strony", + "Update": "Zaktualizuj", + "Pause": "Wstrzymaj", + "Resume": "Wznów", + "Delete": "Skasuj", + "Are you sure?": "Jesteś pewien?", + + "Site address": "Adres strony", + "Donate": "Wspomóż", + + "Missing files": "Brakujące pliki", + "{} try": "{} próba", + "{} tries": "{} próby", + "+ {num_bad_files} more": "+ {num_bad_files} więcej", + + "This is my site": "To moja strona", + "Site title": "Tytuł strony", + "Site description": "Opis strony", + "Save site settings": "Zapisz ustawienia strony", + + "Content publishing": "Publikowanie treści", + "Choose": "Wybierz", + "Sign": "Podpisz", + "Publish": "Opublikuj", + + "This function is disabled on this proxy": "Ta funkcja jest zablokowana w tym proxy", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "Błąd ściągania bazy danych GeoLite2 City: {}!<br>Proszę ściągnąć ją recznie i wypakować do katalogu danych:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Ściąganie bazy danych GeoLite2 City (tylko jednorazowo, ok. 20MB)...", + "GeoLite2 City database downloaded!": "Baza danych GeoLite2 City ściagnięta!", + + "Are you sure?": "Jesteś pewien?", + "Site storage limit modified!": "Limit pamięci strony zmodyfikowany!", + "Database schema reloaded!": "Schemat bazy danych załadowany ponownie!", + "Database rebuilding....": "Przebudowywanie bazy danych...", + "Database rebuilt!": "Baza danych przebudowana!", + "Site updated!": "Strona zaktualizowana!", + "Delete this site": "Usuń tę stronę", + "File write error: ": "Błąd zapisu pliku: ", + "Site settings saved!": "Ustawienia strony zapisane!", + "Enter your private key:": "Wpisz swój prywatny klucz:", + " Signed!": " Podpisane!", + "WebGL not supported": "WebGL nie jest obsługiwany" +} diff --git a/plugins/Sidebar/languages/pt-br.json b/plugins/Sidebar/languages/pt-br.json new file mode 100644 index 00000000..c1678043 --- /dev/null +++ b/plugins/Sidebar/languages/pt-br.json @@ -0,0 +1,81 @@ +{ + "Peers": "Peers", + "Connected": "Ligados", + "Connectable": "Disponíveis", + "Connectable peers": "Peers disponíveis", + + "Data transfer": "Transferência de dados", + "Received": "Recebidos", + "Received bytes": "Bytes recebidos", + "Sent": "Enviados", + "Sent bytes": "Dados enviados", + + "Files": "Arquivos", + "Total": "Total", + "Image": "Imagem", + "Other": "Outros", + "User data": "Dados do usuário", + + "Size limit": "Limite de tamanho", + "limit used": "limite utilizado", + "free space": "espaço livre", + "Set": "Definir", + + "Optional files": "Arquivos opcionais", + "Downloaded": "Baixados", + "Download and help distribute all files": "Baixar e ajudar a distribuir todos os arquivos", + "Total size": "Tamanho total", + "Downloaded files": "Arquivos baixados", + + "Database": "Base de dados", + "search feeds": "pesquisar feeds", + "{feeds} query": "consulta de {feeds}", + "Reload": "Recarregar", + "Rebuild": "Reconstruir", + "No database found": "Base de dados não encontrada", + + "Identity address": "Endereço de identidade", + "Change": "Alterar", + + "Update": "Atualizar", + "Pause": "Suspender", + "Resume": "Continuar", + "Delete": "Remover", + "Are you sure?": "Tem certeza?", + + "Site address": "Endereço do site", + "Donate": "Doar", + + "Missing files": "Arquivos em falta", + "{} try": "{} tentativa", + "{} tries": "{} tentativas", + "+ {num_bad_files} more": "+ {num_bad_files} adicionais", + + "This is my site": "Este é o meu site", + "Site title": "Título do site", + "Site description": "Descrição do site", + "Save site settings": "Salvar definições do site", + + "Content publishing": "Publicação do conteúdo", + "Choose": "Escolher", + "Sign": "Assinar", + "Publish": "Publicar", + + "This function is disabled on this proxy": "Esta função encontra-se desativada neste proxy", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "Erro ao baixar a base de dados GeoLite2 City: {}!<br>Por favor baixe manualmente e descompacte os dados para a seguinte pasta:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Baixando a base de dados GeoLite2 City (uma única vez, ~20MB)...", + "GeoLite2 City database downloaded!": "A base de dados GeoLite2 City foi baixada!", + + "Are you sure?": "Tem certeza?", + "Site storage limit modified!": "O limite de armazenamento do site foi modificado!", + "Database schema reloaded!": "O esquema da base de dados foi atualizado!", + "Database rebuilding....": "Reconstruindo base de dados...", + "Database rebuilt!": "Base de dados reconstruída!", + "Site updated!": "Site atualizado!", + "Delete this site": "Remover este site", + "File write error: ": "Erro de escrita de arquivo: ", + "Site settings saved!": "Definições do site salvas!", + "Enter your private key:": "Digite sua chave privada:", + " Signed!": " Assinado!", + "WebGL not supported": "WebGL não é suportado" +} diff --git a/plugins/Sidebar/languages/ru.json b/plugins/Sidebar/languages/ru.json new file mode 100644 index 00000000..f2eeca04 --- /dev/null +++ b/plugins/Sidebar/languages/ru.json @@ -0,0 +1,82 @@ +{ + "Peers": "Пиры", + "Connected": "Подключенные", + "Connectable": "Доступные", + "Connectable peers": "Пиры доступны для подключения", + + "Data transfer": "Передача данных", + "Received": "Получено", + "Received bytes": "Получено байн", + "Sent": "Отправлено", + "Sent bytes": "Отправлено байт", + + "Files": "Файлы", + "Total": "Всего", + "Image": "Изображений", + "Other": "Другое", + "User data": "Ваш контент", + + "Size limit": "Ограничение по размеру", + "limit used": "Использовано", + "free space": "Доступно", + "Set": "Установить", + + "Optional files": "Опциональные файлы", + "Downloaded": "Загружено", + "Download and help distribute all files": "Загрузить опциональные файлы для помощи сайту", + "Total size": "Объём", + "Downloaded files": "Загруженные файлы", + + "Database": "База данных", + "search feeds": "поиск подписок", + "{feeds} query": "{feeds} запрос", + "Reload": "Перезагрузить", + "Rebuild": "Перестроить", + "No database found": "База данных не найдена", + + "Identity address": "Уникальный адрес", + "Change": "Изменить", + + "Site control": "Управление сайтом", + "Update": "Обновить", + "Pause": "Пауза", + "Resume": "Продолжить", + "Delete": "Удалить", + "Are you sure?": "Вы уверены?", + + "Site address": "Адрес сайта", + "Donate": "Пожертвовать", + + "Missing files": "Отсутствующие файлы", + "{} try": "{} попробовать", + "{} tries": "{} попыток", + "+ {num_bad_files} more": "+ {num_bad_files} ещё", + + "This is my site": "Это мой сайт", + "Site title": "Название сайта", + "Site description": "Описание сайта", + "Save site settings": "Сохранить настройки сайта", + + "Content publishing": "Публикация контента", + "Choose": "Выбрать", + "Sign": "Подписать", + "Publish": "Опубликовать", + + "This function is disabled on this proxy": "Эта функция отключена на этом прокси", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "Ошибка загрузки базы городов GeoLite2: {}!<br>Пожалуйста, загрузите её вручную и распакуйте в папку:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Загрузка базы городов GeoLite2 (это делается только 1 раз, ~20MB)...", + "GeoLite2 City database downloaded!": "База GeoLite2 успешно загружена!", + + "Are you sure?": "Вы уверены?", + "Site storage limit modified!": "Лимит хранилища для сайта изменен!", + "Database schema reloaded!": "Схема базы данных перезагружена!", + "Database rebuilding....": "Перестройка базы данных...", + "Database rebuilt!": "База данных перестроена!", + "Site updated!": "Сайт обновлён!", + "Delete this site": "Удалить этот сайт", + "File write error: ": "Ошибка записи файла:", + "Site settings saved!": "Настройки сайта сохранены!", + "Enter your private key:": "Введите свой приватный ключ:", + " Signed!": " Подписано!", + "WebGL not supported": "WebGL не поддерживается" +} diff --git a/plugins/Sidebar/languages/tr.json b/plugins/Sidebar/languages/tr.json new file mode 100644 index 00000000..88fcd6e0 --- /dev/null +++ b/plugins/Sidebar/languages/tr.json @@ -0,0 +1,82 @@ +{ + "Peers": "Eşler", + "Connected": "Bağlı", + "Connectable": "Erişilebilir", + "Connectable peers": "Bağlanılabilir eşler", + + "Data transfer": "Veri aktarımı", + "Received": "Alınan", + "Received bytes": "Bayt alındı", + "Sent": "Gönderilen", + "Sent bytes": "Bayt gönderildi", + + "Files": "Dosyalar", + "Total": "Toplam", + "Image": "Resim", + "Other": "Diğer", + "User data": "Kullanıcı verisi", + + "Size limit": "Boyut sınırı", + "limit used": "kullanılan", + "free space": "boş", + "Set": "Ayarla", + + "Optional files": "İsteğe bağlı dosyalar", + "Downloaded": "İndirilen", + "Download and help distribute all files": "Tüm dosyaları indir ve yayılmalarına yardım et", + "Total size": "Toplam boyut", + "Downloaded files": "İndirilen dosyalar", + + "Database": "Veritabanı", + "search feeds": "kaynak ara", + "{feeds} query": "{feeds} sorgu", + "Reload": "Yenile", + "Rebuild": "Yapılandır", + "No database found": "Veritabanı yok", + + "Identity address": "Kimlik adresi", + "Change": "Değiştir", + + "Site control": "Site kontrolü", + "Update": "Güncelle", + "Pause": "Duraklat", + "Resume": "Sürdür", + "Delete": "Sil", + "Are you sure?": "Emin misin?", + + "Site address": "Site adresi", + "Donate": "Bağış yap", + + "Missing files": "Eksik dosyalar", + "{} try": "{} deneme", + "{} tries": "{} deneme", + "+ {num_bad_files} more": "+ {num_bad_files} tane daha", + + "This is my site": "Bu benim sitem", + "Site title": "Site başlığı", + "Site description": "Site açıklaması", + "Save site settings": "Site ayarlarını kaydet", + + "Content publishing": "İçerik yayımlanıyor", + "Choose": "Seç", + "Sign": "İmzala", + "Publish": "Yayımla", + + "This function is disabled on this proxy": "Bu özellik bu vekilde kullanılamaz", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "GeoLite2 Şehir veritabanı indirme hatası: {}!<br>Lütfen kendiniz indirip aşağıdaki konuma açınınız:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Şehir veritabanı indiriliyor (sadece bir kere, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 Şehir veritabanı indirildi!", + + "Are you sure?": "Emin misiniz?", + "Site storage limit modified!": "Site saklama sınırı değiştirildi!", + "Database schema reloaded!": "Veritabanı şeması yeniden yüklendi!", + "Database rebuilding....": "Veritabanı yeniden inşa ediliyor...", + "Database rebuilt!": "Veritabanı yeniden inşa edildi!", + "Site updated!": "Site güncellendi!", + "Delete this site": "Bu siteyi sil", + "File write error: ": "Dosya yazma hatası: ", + "Site settings saved!": "Site ayarları kaydedildi!", + "Enter your private key:": "Özel anahtarınızı giriniz:", + " Signed!": " İmzala!", + "WebGL not supported": "WebGL desteklenmiyor" +} diff --git a/plugins/Sidebar/languages/zh-tw.json b/plugins/Sidebar/languages/zh-tw.json new file mode 100644 index 00000000..724de508 --- /dev/null +++ b/plugins/Sidebar/languages/zh-tw.json @@ -0,0 +1,81 @@ +{ + "Peers": "節點數", + "Connected": "已連線", + "Connectable": "可連線", + "Connectable peers": "可連線節點", + + "Data transfer": "數據傳輸", + "Received": "已收到", + "Received bytes": "收到字節", + "Sent": "已傳送", + "Sent bytes": "傳送字節", + + "Files": "檔", + "Total": "共計", + "Image": "圖片", + "Other": "其他", + "User data": "用戶數據", + + "Size limit": "大小限制", + "limit used": "限额", + "free space": "可用空間", + "Set": "設定", + + "Optional files": "可選文件", + "Downloaded": "已下載", + "Download and help distribute all files": "下載並幫助分發所有文件", + "Total size": "總大小", + "Downloaded files": "下載的文件", + + "Database": "資料庫", + "search feeds": "搜尋供稿", + "{feeds} query": "{feeds} 查詢 ", + "Reload": "重載", + "Rebuild": "重建", + "No database found": "未找到資料庫", + + "Identity address": "身份地址", + "Change": "改變", + + "Site control": "網站控制", + "Update": "更新", + "Pause": "暫停", + "Resume": "恢復", + "Delete": "刪除", + "Are you sure?": "你確定?", + + "Site address": "網站地址", + "Donate": "捐贈", + + "Missing files": "缺少的檔", + "{} try": "{} 嘗試", + "{} tries": "{} 已嘗試", + "+ {num_bad_files} more": "+ {num_bad_files} 更多", + + "This is my site": "這是我的網站", + "Site title": "網站標題", + "Site description": "網站描寫", + "Save site settings": "存儲網站設定", + + "Content publishing": "內容髮布", + "Choose": "選擇", + "Sign": "簽署", + "Publish": "發佈", + "This function is disabled on this proxy": "此代理上禁用此功能", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "GeoLite2 地理位置資料庫下載錯誤:{}!<br>請手動下載並解壓到數據目錄:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下載 GeoLite2 地理位置資料庫 (僅一次,約 20MB )...", + "GeoLite2 City database downloaded!": "GeoLite2 地理位置資料庫已下載!", + + "Are you sure?": "你確定?", + "Site storage limit modified!": "網站存儲限制已改變!", + "Database schema reloaded!": "資料庫架構重新加載!", + "Database rebuilding....": "資料庫重建中...", + "Database rebuilt!": "資料庫已重建!", + "Site updated!": "網站已更新!", + "Delete this site": "刪除此網站", + "File write error: ": "檔寫入錯誤:", + "Site settings saved!": "網站設置已保存!", + "Enter your private key:": "輸入您的私鑰:", + " Signed!": " 已簽署!", + "WebGL not supported": "不支持 WebGL" +} diff --git a/plugins/Sidebar/languages/zh.json b/plugins/Sidebar/languages/zh.json new file mode 100644 index 00000000..55ad3acd --- /dev/null +++ b/plugins/Sidebar/languages/zh.json @@ -0,0 +1,81 @@ +{ + "Peers": "节点数", + "Connected": "已连接", + "Connectable": "可连接", + "Connectable peers": "可连接节点", + + "Data transfer": "数据传输", + "Received": "已接收", + "Received bytes": "已接收字节", + "Sent": "已发送", + "Sent bytes": "已发送字节", + + "Files": "文件", + "Total": "总计", + "Image": "图像", + "Other": "其他", + "User data": "用户数据", + + "Size limit": "大小限制", + "limit used": "限额", + "free space": "剩余空间", + "Set": "设置", + + "Optional files": "可选文件", + "Downloaded": "已下载", + "Download and help distribute all files": "下载并帮助分发所有文件", + "Total size": "总大小", + "Downloaded files": "已下载文件", + + "Database": "数据库", + "search feeds": "搜索数据源", + "{feeds} query": "{feeds} 请求", + "Reload": "重载", + "Rebuild": "重建", + "No database found": "没有找到数据库", + + "Identity address": "身份地址", + "Change": "更改", + + "Site control": "站点控制", + "Update": "更新", + "Pause": "暂停", + "Resume": "恢复", + "Delete": "删除", + "Are you sure?": "你确定吗?", + + "Site address": "站点地址", + "Donate": "捐赠", + + "Missing files": "丢失的文件", + "{} try": "{} 尝试", + "{} tries": "{} 已尝试", + "+ {num_bad_files} more": "+ {num_bad_files} 更多", + + "This is my site": "这是我的站点", + "Site title": "站点标题", + "Site description": "站点描述", + "Save site settings": "保存站点设置", + + "Content publishing": "内容发布", + "Choose": "选择", + "Sign": "签名", + "Publish": "发布", + "This function is disabled on this proxy": "此功能在代理上被禁用", + "GeoLite2 City database download error: {}!
<br>Please download manually and unpack to data dir:<br>{}": "GeoLite2 地理位置数据库下载错误:{}!<br>请手动下载并解压在数据目录:<br>
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下载 GeoLite2 地理位置数据库 (仅需一次,约 20MB )...", + "GeoLite2 City database downloaded!": "GeoLite2 地理位置数据库已下载!", + + "Are you sure?": "你确定吗?", + "Site storage limit modified!": "站点存储限制已更改!", + "Database schema reloaded!": "数据库模式已重新加载!", + "Database rebuilding....": "数据库重建中...", + "Database rebuilt!": "数据库已重建!", + "Site updated!": "站点已更新!", + "Delete this site": "删除这个站点", + "File write error: ": "文件写入错误:", + "Site settings saved!": "站点设置已保存!", + "Enter your private key:": "输入你的私钥:", + " Signed!": " 已签名!", + "WebGL not supported": "不支持 WebGL" +} diff --git a/plugins/Sidebar/maxminddb/__init__.py b/plugins/Sidebar/maxminddb/__init__.py new file mode 100644 index 00000000..fc28186b --- /dev/null +++ b/plugins/Sidebar/maxminddb/__init__.py @@ -0,0 +1,46 @@ +# pylint:disable=C0111 +import os + +import maxminddb.reader + +try: + import maxminddb.extension +except ImportError: + maxminddb.extension = None + +from maxminddb.const import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE, + MODE_MEMORY) +from maxminddb.decoder import InvalidDatabaseError + + +def open_database(database, mode=MODE_AUTO): + """Open a Maxmind DB database + + Arguments: + database -- A path to a valid MaxMind DB file such as a GeoIP2 + database file. + mode -- mode to open the database with. Valid mode are: + * MODE_MMAP_EXT - use the C extension with memory map. + * MODE_MMAP - read from memory map. Pure Python. + * MODE_FILE - read database as standard file. Pure Python. + * MODE_MEMORY - load database into memory. Pure Python. + * MODE_AUTO - tries MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that + order. Default mode. + """ + if (mode == MODE_AUTO and maxminddb.extension and + hasattr(maxminddb.extension, 'Reader')) or mode == MODE_MMAP_EXT: + return maxminddb.extension.Reader(database) + elif mode in (MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY): + return maxminddb.reader.Reader(database, mode) + raise ValueError('Unsupported open mode: {0}'.format(mode)) + + +def Reader(database): # pylint: disable=invalid-name + """This exists for backwards compatibility. Use open_database instead""" + return open_database(database) + +__title__ = 'maxminddb' +__version__ = '1.2.0' +__author__ = 'Gregory Oschwald' +__license__ = 'Apache License, Version 2.0' +__copyright__ = 'Copyright 2014 Maxmind, Inc.' 
diff --git a/plugins/Sidebar/maxminddb/compat.py b/plugins/Sidebar/maxminddb/compat.py new file mode 100644 index 00000000..14c98832 --- /dev/null +++ b/plugins/Sidebar/maxminddb/compat.py @@ -0,0 +1,28 @@ +import sys + +# pylint: skip-file + +if sys.version_info[0] == 2: + import ipaddr as ipaddress # pylint:disable=F0401 + ipaddress.ip_address = ipaddress.IPAddress + + int_from_byte = ord + + FileNotFoundError = IOError + + def int_from_bytes(b): + if b: + return int(b.encode("hex"), 16) + return 0 + + byte_from_int = chr +else: + import ipaddress # pylint:disable=F0401 + + int_from_byte = lambda x: x + + FileNotFoundError = FileNotFoundError + + int_from_bytes = lambda x: int.from_bytes(x, 'big') + + byte_from_int = lambda x: bytes([x]) diff --git a/plugins/Sidebar/maxminddb/const.py b/plugins/Sidebar/maxminddb/const.py new file mode 100644 index 00000000..59ea84b6 --- /dev/null +++ b/plugins/Sidebar/maxminddb/const.py @@ -0,0 +1,7 @@ +"""Constants used in the API""" + +MODE_AUTO = 0 +MODE_MMAP_EXT = 1 +MODE_MMAP = 2 +MODE_FILE = 4 +MODE_MEMORY = 8 diff --git a/plugins/Sidebar/maxminddb/decoder.py b/plugins/Sidebar/maxminddb/decoder.py new file mode 100644 index 00000000..1b8f0711 --- /dev/null +++ b/plugins/Sidebar/maxminddb/decoder.py @@ -0,0 +1,173 @@ +""" +maxminddb.decoder +~~~~~~~~~~~~~~~~~ + +This package contains code for decoding the MaxMind DB data section. + +""" +from __future__ import unicode_literals + +import struct + +from maxminddb.compat import byte_from_int, int_from_bytes +from maxminddb.errors import InvalidDatabaseError + + +class Decoder(object): # pylint: disable=too-few-public-methods + + """Decoder for the data section of the MaxMind DB""" + + def __init__(self, database_buffer, pointer_base=0, pointer_test=False): + """Created a Decoder for a MaxMind DB + + Arguments: + database_buffer -- an mmap'd MaxMind DB file. + pointer_base -- the base number to use when decoding a pointer + pointer_test -- used for internal unit testing of pointer code + """ + self._pointer_test = pointer_test + self._buffer = database_buffer + self._pointer_base = pointer_base + + def _decode_array(self, size, offset): + array = [] + for _ in range(size): + (value, offset) = self.decode(offset) + array.append(value) + return array, offset + + def _decode_boolean(self, size, offset): + return size != 0, offset + + def _decode_bytes(self, size, offset): + new_offset = offset + size + return self._buffer[offset:new_offset], new_offset + + # pylint: disable=no-self-argument + # |-> I am open to better ways of doing this as long as it doesn't involve + # lots of code duplication. 
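+    # Note: _decode_packed_type below is a factory evaluated at class-body
+    # time: it returns an unpack_type() closure bound to one struct format
+    # code (see the _type_decoder table further down), which is why it takes
+    # no `self` and pylint's no-self-argument warning is disabled above.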
+ def _decode_packed_type(type_code, type_size, pad=False): + # pylint: disable=protected-access, missing-docstring + def unpack_type(self, size, offset): + if not pad: + self._verify_size(size, type_size) + new_offset = offset + type_size + packed_bytes = self._buffer[offset:new_offset] + if pad: + packed_bytes = packed_bytes.rjust(type_size, b'\x00') + (value,) = struct.unpack(type_code, packed_bytes) + return value, new_offset + return unpack_type + + def _decode_map(self, size, offset): + container = {} + for _ in range(size): + (key, offset) = self.decode(offset) + (value, offset) = self.decode(offset) + container[key] = value + return container, offset + + _pointer_value_offset = { + 1: 0, + 2: 2048, + 3: 526336, + 4: 0, + } + + def _decode_pointer(self, size, offset): + pointer_size = ((size >> 3) & 0x3) + 1 + new_offset = offset + pointer_size + pointer_bytes = self._buffer[offset:new_offset] + packed = pointer_bytes if pointer_size == 4 else struct.pack( + b'!c', byte_from_int(size & 0x7)) + pointer_bytes + unpacked = int_from_bytes(packed) + pointer = unpacked + self._pointer_base + \ + self._pointer_value_offset[pointer_size] + if self._pointer_test: + return pointer, new_offset + (value, _) = self.decode(pointer) + return value, new_offset + + def _decode_uint(self, size, offset): + new_offset = offset + size + uint_bytes = self._buffer[offset:new_offset] + return int_from_bytes(uint_bytes), new_offset + + def _decode_utf8_string(self, size, offset): + new_offset = offset + size + return self._buffer[offset:new_offset].decode('utf-8'), new_offset + + _type_decoder = { + 1: _decode_pointer, + 2: _decode_utf8_string, + 3: _decode_packed_type(b'!d', 8), # double, + 4: _decode_bytes, + 5: _decode_uint, # uint16 + 6: _decode_uint, # uint32 + 7: _decode_map, + 8: _decode_packed_type(b'!i', 4, pad=True), # int32 + 9: _decode_uint, # uint64 + 10: _decode_uint, # uint128 + 11: _decode_array, + 14: _decode_boolean, + 15: _decode_packed_type(b'!f', 4), # float, + } + + def decode(self, offset): + """Decode a section of the data section starting at offset + + Arguments: + offset -- the location of the data structure to decode + """ + new_offset = offset + 1 + (ctrl_byte,) = struct.unpack(b'!B', self._buffer[offset:new_offset]) + type_num = ctrl_byte >> 5 + # Extended type + if not type_num: + (type_num, new_offset) = self._read_extended(new_offset) + + if not type_num in self._type_decoder: + raise InvalidDatabaseError('Unexpected type number ({type}) ' + 'encountered'.format(type=type_num)) + + (size, new_offset) = self._size_from_ctrl_byte( + ctrl_byte, new_offset, type_num) + return self._type_decoder[type_num](self, size, new_offset) + + def _read_extended(self, offset): + (next_byte,) = struct.unpack(b'!B', self._buffer[offset:offset + 1]) + type_num = next_byte + 7 + if type_num < 7: + raise InvalidDatabaseError( + 'Something went horribly wrong in the decoder. 
An ' + 'extended type resolved to a type number < 8 ' + '({type})'.format(type=type_num)) + return type_num, offset + 1 + + def _verify_size(self, expected, actual): + if expected != actual: + raise InvalidDatabaseError( + 'The MaxMind DB file\'s data section contains bad data ' + '(unknown data type or corrupt data)' + ) + + def _size_from_ctrl_byte(self, ctrl_byte, offset, type_num): + size = ctrl_byte & 0x1f + if type_num == 1: + return size, offset + bytes_to_read = 0 if size < 29 else size - 28 + + new_offset = offset + bytes_to_read + size_bytes = self._buffer[offset:new_offset] + + # Using unpack rather than int_from_bytes as it is about 200 lookups + # per second faster here. + if size == 29: + size = 29 + struct.unpack(b'!B', size_bytes)[0] + elif size == 30: + size = 285 + struct.unpack(b'!H', size_bytes)[0] + elif size > 30: + size = struct.unpack( + b'!I', size_bytes.rjust(4, b'\x00'))[0] + 65821 + + return size, new_offset diff --git a/plugins/Sidebar/maxminddb/errors.py b/plugins/Sidebar/maxminddb/errors.py new file mode 100644 index 00000000..f04ff028 --- /dev/null +++ b/plugins/Sidebar/maxminddb/errors.py @@ -0,0 +1,11 @@ +""" +maxminddb.errors +~~~~~~~~~~~~~~~~ + +This module contains custom errors for the MaxMind DB reader +""" + + +class InvalidDatabaseError(RuntimeError): + + """This error is thrown when unexpected data is found in the database.""" diff --git a/plugins/Sidebar/maxminddb/extension/maxminddb.c b/plugins/Sidebar/maxminddb/extension/maxminddb.c new file mode 100644 index 00000000..9e4d45e2 --- /dev/null +++ b/plugins/Sidebar/maxminddb/extension/maxminddb.c @@ -0,0 +1,570 @@ +#include +#include +#include "structmember.h" + +#define __STDC_FORMAT_MACROS +#include + +static PyTypeObject Reader_Type; +static PyTypeObject Metadata_Type; +static PyObject *MaxMindDB_error; + +typedef struct { + PyObject_HEAD /* no semicolon */ + MMDB_s *mmdb; +} Reader_obj; + +typedef struct { + PyObject_HEAD /* no semicolon */ + PyObject *binary_format_major_version; + PyObject *binary_format_minor_version; + PyObject *build_epoch; + PyObject *database_type; + PyObject *description; + PyObject *ip_version; + PyObject *languages; + PyObject *node_count; + PyObject *record_size; +} Metadata_obj; + +static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list); +static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list); +static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list); +static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list); + +#if PY_MAJOR_VERSION >= 3 + #define MOD_INIT(name) PyMODINIT_FUNC PyInit_ ## name(void) + #define RETURN_MOD_INIT(m) return (m) + #define FILE_NOT_FOUND_ERROR PyExc_FileNotFoundError +#else + #define MOD_INIT(name) PyMODINIT_FUNC init ## name(void) + #define RETURN_MOD_INIT(m) return + #define PyInt_FromLong PyLong_FromLong + #define FILE_NOT_FOUND_ERROR PyExc_IOError +#endif + +#ifdef __GNUC__ + # define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) +#else + # define UNUSED(x) UNUSED_ ## x +#endif + +static int Reader_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + char *filename; + int mode = 0; + + static char *kwlist[] = {"database", "mode", NULL}; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|i", kwlist, &filename, &mode)) { + return -1; + } + + if (mode != 0 && mode != 1) { + PyErr_Format(PyExc_ValueError, "Unsupported open mode (%i). 
Only " + "MODE_AUTO and MODE_MMAP_EXT are supported by this extension.", + mode); + return -1; + } + + if (0 != access(filename, R_OK)) { + PyErr_Format(FILE_NOT_FOUND_ERROR, + "No such file or directory: '%s'", + filename); + return -1; + } + + MMDB_s *mmdb = (MMDB_s *)malloc(sizeof(MMDB_s)); + if (NULL == mmdb) { + PyErr_NoMemory(); + return -1; + } + + Reader_obj *mmdb_obj = (Reader_obj *)self; + if (!mmdb_obj) { + free(mmdb); + PyErr_NoMemory(); + return -1; + } + + uint16_t status = MMDB_open(filename, MMDB_MODE_MMAP, mmdb); + + if (MMDB_SUCCESS != status) { + free(mmdb); + PyErr_Format( + MaxMindDB_error, + "Error opening database file (%s). Is this a valid MaxMind DB file?", + filename + ); + return -1; + } + + mmdb_obj->mmdb = mmdb; + return 0; +} + +static PyObject *Reader_get(PyObject *self, PyObject *args) +{ + char *ip_address = NULL; + + Reader_obj *mmdb_obj = (Reader_obj *)self; + if (!PyArg_ParseTuple(args, "s", &ip_address)) { + return NULL; + } + + MMDB_s *mmdb = mmdb_obj->mmdb; + + if (NULL == mmdb) { + PyErr_SetString(PyExc_ValueError, + "Attempt to read from a closed MaxMind DB."); + return NULL; + } + + int gai_error = 0; + int mmdb_error = MMDB_SUCCESS; + MMDB_lookup_result_s result = + MMDB_lookup_string(mmdb, ip_address, &gai_error, + &mmdb_error); + + if (0 != gai_error) { + PyErr_Format(PyExc_ValueError, + "'%s' does not appear to be an IPv4 or IPv6 address.", + ip_address); + return NULL; + } + + if (MMDB_SUCCESS != mmdb_error) { + PyObject *exception; + if (MMDB_IPV6_LOOKUP_IN_IPV4_DATABASE_ERROR == mmdb_error) { + exception = PyExc_ValueError; + } else { + exception = MaxMindDB_error; + } + PyErr_Format(exception, "Error looking up %s. %s", + ip_address, MMDB_strerror(mmdb_error)); + return NULL; + } + + if (!result.found_entry) { + Py_RETURN_NONE; + } + + MMDB_entry_data_list_s *entry_data_list = NULL; + int status = MMDB_get_entry_data_list(&result.entry, &entry_data_list); + if (MMDB_SUCCESS != status) { + PyErr_Format(MaxMindDB_error, + "Error while looking up data for %s. 
%s", + ip_address, MMDB_strerror(status)); + MMDB_free_entry_data_list(entry_data_list); + return NULL; + } + + MMDB_entry_data_list_s *original_entry_data_list = entry_data_list; + PyObject *py_obj = from_entry_data_list(&entry_data_list); + MMDB_free_entry_data_list(original_entry_data_list); + return py_obj; +} + +static PyObject *Reader_metadata(PyObject *self, PyObject *UNUSED(args)) +{ + Reader_obj *mmdb_obj = (Reader_obj *)self; + + if (NULL == mmdb_obj->mmdb) { + PyErr_SetString(PyExc_IOError, + "Attempt to read from a closed MaxMind DB."); + return NULL; + } + + MMDB_entry_data_list_s *entry_data_list; + MMDB_get_metadata_as_entry_data_list(mmdb_obj->mmdb, &entry_data_list); + MMDB_entry_data_list_s *original_entry_data_list = entry_data_list; + + PyObject *metadata_dict = from_entry_data_list(&entry_data_list); + MMDB_free_entry_data_list(original_entry_data_list); + if (NULL == metadata_dict || !PyDict_Check(metadata_dict)) { + PyErr_SetString(MaxMindDB_error, + "Error decoding metadata."); + return NULL; + } + + PyObject *args = PyTuple_New(0); + if (NULL == args) { + Py_DECREF(metadata_dict); + return NULL; + } + + PyObject *metadata = PyObject_Call((PyObject *)&Metadata_Type, args, + metadata_dict); + + Py_DECREF(metadata_dict); + return metadata; +} + +static PyObject *Reader_close(PyObject *self, PyObject *UNUSED(args)) +{ + Reader_obj *mmdb_obj = (Reader_obj *)self; + + if (NULL != mmdb_obj->mmdb) { + MMDB_close(mmdb_obj->mmdb); + free(mmdb_obj->mmdb); + mmdb_obj->mmdb = NULL; + } + + Py_RETURN_NONE; +} + +static void Reader_dealloc(PyObject *self) +{ + Reader_obj *obj = (Reader_obj *)self; + if (NULL != obj->mmdb) { + Reader_close(self, NULL); + } + + PyObject_Del(self); +} + +static int Metadata_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + + PyObject + *binary_format_major_version, + *binary_format_minor_version, + *build_epoch, + *database_type, + *description, + *ip_version, + *languages, + *node_count, + *record_size; + + static char *kwlist[] = { + "binary_format_major_version", + "binary_format_minor_version", + "build_epoch", + "database_type", + "description", + "ip_version", + "languages", + "node_count", + "record_size", + NULL + }; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist, + &binary_format_major_version, + &binary_format_minor_version, + &build_epoch, + &database_type, + &description, + &ip_version, + &languages, + &node_count, + &record_size)) { + return -1; + } + + Metadata_obj *obj = (Metadata_obj *)self; + + obj->binary_format_major_version = binary_format_major_version; + obj->binary_format_minor_version = binary_format_minor_version; + obj->build_epoch = build_epoch; + obj->database_type = database_type; + obj->description = description; + obj->ip_version = ip_version; + obj->languages = languages; + obj->node_count = node_count; + obj->record_size = record_size; + + Py_INCREF(obj->binary_format_major_version); + Py_INCREF(obj->binary_format_minor_version); + Py_INCREF(obj->build_epoch); + Py_INCREF(obj->database_type); + Py_INCREF(obj->description); + Py_INCREF(obj->ip_version); + Py_INCREF(obj->languages); + Py_INCREF(obj->node_count); + Py_INCREF(obj->record_size); + + return 0; +} + +static void Metadata_dealloc(PyObject *self) +{ + Metadata_obj *obj = (Metadata_obj *)self; + Py_DECREF(obj->binary_format_major_version); + Py_DECREF(obj->binary_format_minor_version); + Py_DECREF(obj->build_epoch); + Py_DECREF(obj->database_type); + Py_DECREF(obj->description); + Py_DECREF(obj->ip_version); + 
Py_DECREF(obj->languages); + Py_DECREF(obj->node_count); + Py_DECREF(obj->record_size); + PyObject_Del(self); +} + +static PyObject *from_entry_data_list(MMDB_entry_data_list_s **entry_data_list) +{ + if (NULL == entry_data_list || NULL == *entry_data_list) { + PyErr_SetString( + MaxMindDB_error, + "Error while looking up data. Your database may be corrupt or you have found a bug in libmaxminddb." + ); + return NULL; + } + + switch ((*entry_data_list)->entry_data.type) { + case MMDB_DATA_TYPE_MAP: + return from_map(entry_data_list); + case MMDB_DATA_TYPE_ARRAY: + return from_array(entry_data_list); + case MMDB_DATA_TYPE_UTF8_STRING: + return PyUnicode_FromStringAndSize( + (*entry_data_list)->entry_data.utf8_string, + (*entry_data_list)->entry_data.data_size + ); + case MMDB_DATA_TYPE_BYTES: + return PyByteArray_FromStringAndSize( + (const char *)(*entry_data_list)->entry_data.bytes, + (Py_ssize_t)(*entry_data_list)->entry_data.data_size); + case MMDB_DATA_TYPE_DOUBLE: + return PyFloat_FromDouble((*entry_data_list)->entry_data.double_value); + case MMDB_DATA_TYPE_FLOAT: + return PyFloat_FromDouble((*entry_data_list)->entry_data.float_value); + case MMDB_DATA_TYPE_UINT16: + return PyLong_FromLong( (*entry_data_list)->entry_data.uint16); + case MMDB_DATA_TYPE_UINT32: + return PyLong_FromLong((*entry_data_list)->entry_data.uint32); + case MMDB_DATA_TYPE_BOOLEAN: + return PyBool_FromLong((*entry_data_list)->entry_data.boolean); + case MMDB_DATA_TYPE_UINT64: + return PyLong_FromUnsignedLongLong( + (*entry_data_list)->entry_data.uint64); + case MMDB_DATA_TYPE_UINT128: + return from_uint128(*entry_data_list); + case MMDB_DATA_TYPE_INT32: + return PyLong_FromLong((*entry_data_list)->entry_data.int32); + default: + PyErr_Format(MaxMindDB_error, + "Invalid data type arguments: %d", + (*entry_data_list)->entry_data.type); + return NULL; + } + return NULL; +} + +static PyObject *from_map(MMDB_entry_data_list_s **entry_data_list) +{ + PyObject *py_obj = PyDict_New(); + if (NULL == py_obj) { + PyErr_NoMemory(); + return NULL; + } + + const uint32_t map_size = (*entry_data_list)->entry_data.data_size; + + uint i; + // entry_data_list cannot start out NULL (see from_entry_data_list). We + // check it in the loop because it may become NULL. + // coverity[check_after_deref] + for (i = 0; i < map_size && entry_data_list; i++) { + *entry_data_list = (*entry_data_list)->next; + + PyObject *key = PyUnicode_FromStringAndSize( + (char *)(*entry_data_list)->entry_data.utf8_string, + (*entry_data_list)->entry_data.data_size + ); + + *entry_data_list = (*entry_data_list)->next; + + PyObject *value = from_entry_data_list(entry_data_list); + if (NULL == value) { + Py_DECREF(key); + Py_DECREF(py_obj); + return NULL; + } + PyDict_SetItem(py_obj, key, value); + Py_DECREF(value); + Py_DECREF(key); + } + + return py_obj; +} + +static PyObject *from_array(MMDB_entry_data_list_s **entry_data_list) +{ + const uint32_t size = (*entry_data_list)->entry_data.data_size; + + PyObject *py_obj = PyList_New(size); + if (NULL == py_obj) { + PyErr_NoMemory(); + return NULL; + } + + uint i; + // entry_data_list cannot start out NULL (see from_entry_data_list). We + // check it in the loop because it may become NULL. 
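+    // from_entry_data_list advances *entry_data_list as it consumes
+    // entries, so a truncated or corrupt database can leave it NULL
+    // before all 'size' elements have been read.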
+ // coverity[check_after_deref] + for (i = 0; i < size && entry_data_list; i++) { + *entry_data_list = (*entry_data_list)->next; + PyObject *value = from_entry_data_list(entry_data_list); + if (NULL == value) { + Py_DECREF(py_obj); + return NULL; + } + // PyList_SetItem 'steals' the reference + PyList_SetItem(py_obj, i, value); + } + return py_obj; +} + +static PyObject *from_uint128(const MMDB_entry_data_list_s *entry_data_list) +{ + uint64_t high = 0; + uint64_t low = 0; +#if MMDB_UINT128_IS_BYTE_ARRAY + int i; + for (i = 0; i < 8; i++) { + high = (high << 8) | entry_data_list->entry_data.uint128[i]; + } + + for (i = 8; i < 16; i++) { + low = (low << 8) | entry_data_list->entry_data.uint128[i]; + } +#else + high = entry_data_list->entry_data.uint128 >> 64; + low = (uint64_t)entry_data_list->entry_data.uint128; +#endif + + char *num_str = malloc(33); + if (NULL == num_str) { + PyErr_NoMemory(); + return NULL; + } + + snprintf(num_str, 33, "%016" PRIX64 "%016" PRIX64, high, low); + + PyObject *py_obj = PyLong_FromString(num_str, NULL, 16); + + free(num_str); + return py_obj; +} + +static PyMethodDef Reader_methods[] = { + { "get", Reader_get, METH_VARARGS, + "Get record for IP address" }, + { "metadata", Reader_metadata, METH_NOARGS, + "Returns metadata object for database" }, + { "close", Reader_close, METH_NOARGS, "Closes database"}, + { NULL, NULL, 0, NULL } +}; + +static PyTypeObject Reader_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_basicsize = sizeof(Reader_obj), + .tp_dealloc = Reader_dealloc, + .tp_doc = "Reader object", + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_methods = Reader_methods, + .tp_name = "Reader", + .tp_init = Reader_init, +}; + +static PyMethodDef Metadata_methods[] = { + { NULL, NULL, 0, NULL } +}; + +/* *INDENT-OFF* */ +static PyMemberDef Metadata_members[] = { + { "binary_format_major_version", T_OBJECT, offsetof( + Metadata_obj, binary_format_major_version), READONLY, NULL }, + { "binary_format_minor_version", T_OBJECT, offsetof( + Metadata_obj, binary_format_minor_version), READONLY, NULL }, + { "build_epoch", T_OBJECT, offsetof(Metadata_obj, build_epoch), + READONLY, NULL }, + { "database_type", T_OBJECT, offsetof(Metadata_obj, database_type), + READONLY, NULL }, + { "description", T_OBJECT, offsetof(Metadata_obj, description), + READONLY, NULL }, + { "ip_version", T_OBJECT, offsetof(Metadata_obj, ip_version), + READONLY, NULL }, + { "languages", T_OBJECT, offsetof(Metadata_obj, languages), READONLY, + NULL }, + { "node_count", T_OBJECT, offsetof(Metadata_obj, node_count), + READONLY, NULL }, + { "record_size", T_OBJECT, offsetof(Metadata_obj, record_size), + READONLY, NULL }, + { NULL, 0, 0, 0, NULL } +}; +/* *INDENT-ON* */ + +static PyTypeObject Metadata_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_basicsize = sizeof(Metadata_obj), + .tp_dealloc = Metadata_dealloc, + .tp_doc = "Metadata object", + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_members = Metadata_members, + .tp_methods = Metadata_methods, + .tp_name = "Metadata", + .tp_init = Metadata_init +}; + +static PyMethodDef MaxMindDB_methods[] = { + { NULL, NULL, 0, NULL } +}; + + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef MaxMindDB_module = { + PyModuleDef_HEAD_INIT, + .m_name = "extension", + .m_doc = "This is a C extension to read MaxMind DB file format", + .m_methods = MaxMindDB_methods, +}; +#endif + +MOD_INIT(extension){ + PyObject *m; + +#if PY_MAJOR_VERSION >= 3 + m = PyModule_Create(&MaxMindDB_module); +#else + m = Py_InitModule("extension", MaxMindDB_methods); +#endif + + if (!m) { + 
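+        /* PyModule_Create (Python 3) or Py_InitModule (Python 2) failed. */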
RETURN_MOD_INIT(NULL); + } + + Reader_Type.tp_new = PyType_GenericNew; + if (PyType_Ready(&Reader_Type)) { + RETURN_MOD_INIT(NULL); + } + Py_INCREF(&Reader_Type); + PyModule_AddObject(m, "Reader", (PyObject *)&Reader_Type); + + Metadata_Type.tp_new = PyType_GenericNew; + if (PyType_Ready(&Metadata_Type)) { + RETURN_MOD_INIT(NULL); + } + PyModule_AddObject(m, "extension", (PyObject *)&Metadata_Type); + + PyObject* error_mod = PyImport_ImportModule("maxminddb.errors"); + if (error_mod == NULL) { + RETURN_MOD_INIT(NULL); + } + + MaxMindDB_error = PyObject_GetAttrString(error_mod, "InvalidDatabaseError"); + Py_DECREF(error_mod); + + if (MaxMindDB_error == NULL) { + RETURN_MOD_INIT(NULL); + } + + Py_INCREF(MaxMindDB_error); + + /* We primarily add it to the module for backwards compatibility */ + PyModule_AddObject(m, "InvalidDatabaseError", MaxMindDB_error); + + RETURN_MOD_INIT(m); +} diff --git a/plugins/Sidebar/maxminddb/file.py b/plugins/Sidebar/maxminddb/file.py new file mode 100644 index 00000000..3460893e --- /dev/null +++ b/plugins/Sidebar/maxminddb/file.py @@ -0,0 +1,65 @@ +"""For internal use only. It provides a slice-like file reader.""" + +import os + +try: + from multiprocessing import Lock +except ImportError: + from threading import Lock + + +class FileBuffer(object): + + """A slice-able file reader""" + + def __init__(self, database): + self._handle = open(database, 'rb') + self._size = os.fstat(self._handle.fileno()).st_size + if not hasattr(os, 'pread'): + self._lock = Lock() + + def __getitem__(self, key): + if isinstance(key, slice): + return self._read(key.stop - key.start, key.start) + elif isinstance(key, int): + return self._read(1, key) + else: + raise TypeError("Invalid argument type.") + + def rfind(self, needle, start): + """Reverse find needle from start""" + pos = self._read(self._size - start - 1, start).rfind(needle) + if pos == -1: + return pos + return start + pos + + def size(self): + """Size of file""" + return self._size + + def close(self): + """Close file""" + self._handle.close() + + if hasattr(os, 'pread'): + + def _read(self, buffersize, offset): + """read that uses pread""" + # pylint: disable=no-member + return os.pread(self._handle.fileno(), buffersize, offset) + + else: + + def _read(self, buffersize, offset): + """read with a lock + + This lock is necessary as after a fork, the different processes + will share the same file table entry, even if we dup the fd, and + as such the same offsets. There does not appear to be a way to + duplicate the file table entry and we cannot re-open based on the + original path as that file may have replaced with another or + unlinked. + """ + with self._lock: + self._handle.seek(offset) + return self._handle.read(buffersize) diff --git a/plugins/Sidebar/maxminddb/ipaddr.py b/plugins/Sidebar/maxminddb/ipaddr.py new file mode 100644 index 00000000..ad27ae9d --- /dev/null +++ b/plugins/Sidebar/maxminddb/ipaddr.py @@ -0,0 +1,1897 @@ +#!/usr/bin/python +# +# Copyright 2007 Google Inc. +# Licensed to PSF under a Contributor Agreement. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
See the License for the specific language governing +# permissions and limitations under the License. + +"""A fast, lightweight IPv4/IPv6 manipulation library in Python. + +This library is used to create/poke/manipulate IPv4 and IPv6 addresses +and networks. + +""" + +__version__ = '2.1.10' + +import struct + +IPV4LENGTH = 32 +IPV6LENGTH = 128 + + +class AddressValueError(ValueError): + """A Value Error related to the address.""" + + +class NetmaskValueError(ValueError): + """A Value Error related to the netmask.""" + + +def IPAddress(address, version=None): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + version: An Integer, 4 or 6. If set, don't try to automatically + determine what the IP address type is. important for things + like IPAddress(1), which could be IPv4, '0.0.0.1', or IPv6, + '::1'. + + Returns: + An IPv4Address or IPv6Address object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. + + """ + if version: + if version == 4: + return IPv4Address(address) + elif version == 6: + return IPv6Address(address) + + try: + return IPv4Address(address) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Address(address) + except (AddressValueError, NetmaskValueError): + pass + + raise ValueError('%r does not appear to be an IPv4 or IPv6 address' % + address) + + +def IPNetwork(address, version=None, strict=False): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + version: An Integer, if set, don't try to automatically + determine what the IP address type is. important for things + like IPNetwork(1), which could be IPv4, '0.0.0.1/32', or IPv6, + '::1/128'. + + Returns: + An IPv4Network or IPv6Network object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. Or if a strict network was requested and a strict + network wasn't given. + + """ + if version: + if version == 4: + return IPv4Network(address, strict) + elif version == 6: + return IPv6Network(address, strict) + + try: + return IPv4Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + try: + return IPv6Network(address, strict) + except (AddressValueError, NetmaskValueError): + pass + + raise ValueError('%r does not appear to be an IPv4 or IPv6 network' % + address) + + +def v4_int_to_packed(address): + """The binary representation of this address. + + Args: + address: An integer representation of an IPv4 IP address. + + Returns: + The binary representation of this address. + + Raises: + ValueError: If the integer is too large to be an IPv4 IP + address. + """ + if address > _BaseV4._ALL_ONES: + raise ValueError('Address too large for IPv4') + return Bytes(struct.pack('!I', address)) + + +def v6_int_to_packed(address): + """The binary representation of this address. + + Args: + address: An integer representation of an IPv4 IP address. + + Returns: + The binary representation of this address. + """ + return Bytes(struct.pack('!QQ', address >> 64, address & (2**64 - 1))) + + +def _find_address_range(addresses): + """Find a sequence of addresses. + + Args: + addresses: a list of IPv4 or IPv6 addresses. 
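+
+    Example (the leading run of consecutive addresses is returned):
+        >>> _find_address_range([IPv4Address('1.1.1.0'),
+        ...                      IPv4Address('1.1.1.1'),
+        ...                      IPv4Address('1.1.1.5')])
+        (IPv4Address('1.1.1.0'), IPv4Address('1.1.1.1'))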
+ + Returns: + A tuple containing the first and last IP addresses in the sequence. + + """ + first = last = addresses[0] + for ip in addresses[1:]: + if ip._ip == last._ip + 1: + last = ip + else: + break + return (first, last) + +def _get_prefix_length(number1, number2, bits): + """Get the number of leading bits that are same for two numbers. + + Args: + number1: an integer. + number2: another integer. + bits: the maximum number of bits to compare. + + Returns: + The number of leading bits that are the same for two numbers. + + """ + for i in range(bits): + if number1 >> i == number2 >> i: + return bits - i + return 0 + +def _count_righthand_zero_bits(number, bits): + """Count the number of zero bits on the right hand side. + + Args: + number: an integer. + bits: maximum number of bits to count. + + Returns: + The number of zero bits on the right hand side of the number. + + """ + if number == 0: + return bits + for i in range(bits): + if (number >> i) % 2: + return i + +def summarize_address_range(first, last): + """Summarize a network range given the first and last IP addresses. + + Example: + >>> summarize_address_range(IPv4Address('1.1.1.0'), + IPv4Address('1.1.1.130')) + [IPv4Network('1.1.1.0/25'), IPv4Network('1.1.1.128/31'), + IPv4Network('1.1.1.130/32')] + + Args: + first: the first IPv4Address or IPv6Address in the range. + last: the last IPv4Address or IPv6Address in the range. + + Returns: + The address range collapsed to a list of IPv4Network's or + IPv6Network's. + + Raise: + TypeError: + If the first and last objects are not IP addresses. + If the first and last objects are not the same version. + ValueError: + If the last object is not greater than the first. + If the version is not 4 or 6. + + """ + if not (isinstance(first, _BaseIP) and isinstance(last, _BaseIP)): + raise TypeError('first and last must be IP addresses, not networks') + if first.version != last.version: + raise TypeError("%s and %s are not of the same version" % ( + str(first), str(last))) + if first > last: + raise ValueError('last IP address must be greater than first') + + networks = [] + + if first.version == 4: + ip = IPv4Network + elif first.version == 6: + ip = IPv6Network + else: + raise ValueError('unknown IP version') + + ip_bits = first._max_prefixlen + first_int = first._ip + last_int = last._ip + while first_int <= last_int: + nbits = _count_righthand_zero_bits(first_int, ip_bits) + current = None + while nbits >= 0: + addend = 2**nbits - 1 + current = first_int + addend + nbits -= 1 + if current <= last_int: + break + prefix = _get_prefix_length(first_int, current, ip_bits) + net = ip('%s/%d' % (str(first), prefix)) + networks.append(net) + if current == ip._ALL_ONES: + break + first_int = current + 1 + first = IPAddress(first_int, version=first._version) + return networks + +def _collapse_address_list_recursive(addresses): + """Loops through the addresses, collapsing concurrent netblocks. + + Example: + + ip1 = IPv4Network('1.1.0.0/24') + ip2 = IPv4Network('1.1.1.0/24') + ip3 = IPv4Network('1.1.2.0/24') + ip4 = IPv4Network('1.1.3.0/24') + ip5 = IPv4Network('1.1.4.0/24') + ip6 = IPv4Network('1.1.0.1/22') + + _collapse_address_list_recursive([ip1, ip2, ip3, ip4, ip5, ip6]) -> + [IPv4Network('1.1.0.0/22'), IPv4Network('1.1.4.0/24')] + + This shouldn't be called directly; it is called via + collapse_address_list([]). + + Args: + addresses: A list of IPv4Network's or IPv6Network's + + Returns: + A list of IPv4Network's or IPv6Network's depending on what we were + passed. 
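+
+    Each pass either absorbs or merges at least one entry (so the list
+    shrinks) or performs no optimization and returns, which is why the
+    recursion terminates.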
+ + """ + ret_array = [] + optimized = False + + for cur_addr in addresses: + if not ret_array: + ret_array.append(cur_addr) + continue + if cur_addr in ret_array[-1]: + optimized = True + elif cur_addr == ret_array[-1].supernet().subnet()[1]: + ret_array.append(ret_array.pop().supernet()) + optimized = True + else: + ret_array.append(cur_addr) + + if optimized: + return _collapse_address_list_recursive(ret_array) + + return ret_array + + +def collapse_address_list(addresses): + """Collapse a list of IP objects. + + Example: + collapse_address_list([IPv4('1.1.0.0/24'), IPv4('1.1.1.0/24')]) -> + [IPv4('1.1.0.0/23')] + + Args: + addresses: A list of IPv4Network or IPv6Network objects. + + Returns: + A list of IPv4Network or IPv6Network objects depending on what we + were passed. + + Raises: + TypeError: If passed a list of mixed version objects. + + """ + i = 0 + addrs = [] + ips = [] + nets = [] + + # split IP addresses and networks + for ip in addresses: + if isinstance(ip, _BaseIP): + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + str(ip), str(ips[-1]))) + ips.append(ip) + elif ip._prefixlen == ip._max_prefixlen: + if ips and ips[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + str(ip), str(ips[-1]))) + ips.append(ip.ip) + else: + if nets and nets[-1]._version != ip._version: + raise TypeError("%s and %s are not of the same version" % ( + str(ip), str(ips[-1]))) + nets.append(ip) + + # sort and dedup + ips = sorted(set(ips)) + nets = sorted(set(nets)) + + while i < len(ips): + (first, last) = _find_address_range(ips[i:]) + i = ips.index(last) + 1 + addrs.extend(summarize_address_range(first, last)) + + return _collapse_address_list_recursive(sorted( + addrs + nets, key=_BaseNet._get_networks_key)) + +# backwards compatibility +CollapseAddrList = collapse_address_list + +# We need to distinguish between the string and packed-bytes representations +# of an IP address. For example, b'0::1' is the IPv4 address 48.58.58.49, +# while '0::1' is an IPv6 address. +# +# In Python 3, the native 'bytes' type already provides this functionality, +# so we use it directly. For earlier implementations where bytes is not a +# distinct type, we create a subclass of str to serve as a tag. +# +# Usage example (Python 2): +# ip = ipaddr.IPAddress(ipaddr.Bytes('xxxx')) +# +# Usage example (Python 3): +# ip = ipaddr.IPAddress(b'xxxx') +try: + if bytes is str: + raise TypeError("bytes is not a distinct type") + Bytes = bytes +except (NameError, TypeError): + class Bytes(str): + def __repr__(self): + return 'Bytes(%s)' % str.__repr__(self) + +def get_mixed_type_key(obj): + """Return a key suitable for sorting between networks and addresses. + + Address and Network objects are not sortable by default; they're + fundamentally different so the expression + + IPv4Address('1.1.1.1') <= IPv4Network('1.1.1.1/24') + + doesn't make any sense. There are some times however, where you may wish + to have ipaddr sort these for you anyway. If you need to do this, you + can use this function as the key= argument to sorted(). + + Args: + obj: either a Network or Address object. + Returns: + appropriate key. 
+ + """ + if isinstance(obj, _BaseNet): + return obj._get_networks_key() + elif isinstance(obj, _BaseIP): + return obj._get_address_key() + return NotImplemented + +class _IPAddrBase(object): + + """The mother class.""" + + def __index__(self): + return self._ip + + def __int__(self): + return self._ip + + def __hex__(self): + return hex(self._ip) + + @property + def exploded(self): + """Return the longhand version of the IP address as a string.""" + return self._explode_shorthand_ip_string() + + @property + def compressed(self): + """Return the shorthand version of the IP address as a string.""" + return str(self) + + +class _BaseIP(_IPAddrBase): + + """A generic IP object. + + This IP class contains the version independent methods which are + used by single IP addresses. + + """ + + def __eq__(self, other): + try: + return (self._ip == other._ip + and self._version == other._version) + except AttributeError: + return NotImplemented + + def __ne__(self, other): + eq = self.__eq__(other) + if eq is NotImplemented: + return NotImplemented + return not eq + + def __le__(self, other): + gt = self.__gt__(other) + if gt is NotImplemented: + return NotImplemented + return not gt + + def __ge__(self, other): + lt = self.__lt__(other) + if lt is NotImplemented: + return NotImplemented + return not lt + + def __lt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseIP): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self._ip != other._ip: + return self._ip < other._ip + return False + + def __gt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseIP): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self._ip != other._ip: + return self._ip > other._ip + return False + + # Shorthand for Integer addition and subtraction. This is not + # meant to ever support addition/subtraction of addresses. + def __add__(self, other): + if not isinstance(other, int): + return NotImplemented + return IPAddress(int(self) + other, version=self._version) + + def __sub__(self, other): + if not isinstance(other, int): + return NotImplemented + return IPAddress(int(self) - other, version=self._version) + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, str(self)) + + def __str__(self): + return '%s' % self._string_from_ip_int(self._ip) + + def __hash__(self): + return hash(hex(long(self._ip))) + + def _get_address_key(self): + return (self._version, self) + + @property + def version(self): + raise NotImplementedError('BaseIP has no version') + + +class _BaseNet(_IPAddrBase): + + """A generic IP object. + + This IP class contains the version independent methods which are + used by networks. + + """ + + def __init__(self, address): + self._cache = {} + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, str(self)) + + def iterhosts(self): + """Generate Iterator over usable hosts in a network. + + This is like __iter__ except it doesn't return the network + or broadcast addresses. 
+ + """ + cur = int(self.network) + 1 + bcast = int(self.broadcast) - 1 + while cur <= bcast: + cur += 1 + yield IPAddress(cur - 1, version=self._version) + + def __iter__(self): + cur = int(self.network) + bcast = int(self.broadcast) + while cur <= bcast: + cur += 1 + yield IPAddress(cur - 1, version=self._version) + + def __getitem__(self, n): + network = int(self.network) + broadcast = int(self.broadcast) + if n >= 0: + if network + n > broadcast: + raise IndexError + return IPAddress(network + n, version=self._version) + else: + n += 1 + if broadcast + n < network: + raise IndexError + return IPAddress(broadcast + n, version=self._version) + + def __lt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseNet): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self.network != other.network: + return self.network < other.network + if self.netmask != other.netmask: + return self.netmask < other.netmask + return False + + def __gt__(self, other): + if self._version != other._version: + raise TypeError('%s and %s are not of the same version' % ( + str(self), str(other))) + if not isinstance(other, _BaseNet): + raise TypeError('%s and %s are not of the same type' % ( + str(self), str(other))) + if self.network != other.network: + return self.network > other.network + if self.netmask != other.netmask: + return self.netmask > other.netmask + return False + + def __le__(self, other): + gt = self.__gt__(other) + if gt is NotImplemented: + return NotImplemented + return not gt + + def __ge__(self, other): + lt = self.__lt__(other) + if lt is NotImplemented: + return NotImplemented + return not lt + + def __eq__(self, other): + try: + return (self._version == other._version + and self.network == other.network + and int(self.netmask) == int(other.netmask)) + except AttributeError: + if isinstance(other, _BaseIP): + return (self._version == other._version + and self._ip == other._ip) + + def __ne__(self, other): + eq = self.__eq__(other) + if eq is NotImplemented: + return NotImplemented + return not eq + + def __str__(self): + return '%s/%s' % (str(self.ip), + str(self._prefixlen)) + + def __hash__(self): + return hash(int(self.network) ^ int(self.netmask)) + + def __contains__(self, other): + # always false if one is v4 and the other is v6. + if self._version != other._version: + return False + # dealing with another network. 
+ if isinstance(other, _BaseNet): + return (self.network <= other.network and + self.broadcast >= other.broadcast) + # dealing with another address + else: + return (int(self.network) <= int(other._ip) <= + int(self.broadcast)) + + def overlaps(self, other): + """Tell if self is partly contained in other.""" + return self.network in other or self.broadcast in other or ( + other.network in self or other.broadcast in self) + + @property + def network(self): + x = self._cache.get('network') + if x is None: + x = IPAddress(self._ip & int(self.netmask), version=self._version) + self._cache['network'] = x + return x + + @property + def broadcast(self): + x = self._cache.get('broadcast') + if x is None: + x = IPAddress(self._ip | int(self.hostmask), version=self._version) + self._cache['broadcast'] = x + return x + + @property + def hostmask(self): + x = self._cache.get('hostmask') + if x is None: + x = IPAddress(int(self.netmask) ^ self._ALL_ONES, + version=self._version) + self._cache['hostmask'] = x + return x + + @property + def with_prefixlen(self): + return '%s/%d' % (str(self.ip), self._prefixlen) + + @property + def with_netmask(self): + return '%s/%s' % (str(self.ip), str(self.netmask)) + + @property + def with_hostmask(self): + return '%s/%s' % (str(self.ip), str(self.hostmask)) + + @property + def numhosts(self): + """Number of hosts in the current subnet.""" + return int(self.broadcast) - int(self.network) + 1 + + @property + def version(self): + raise NotImplementedError('BaseNet has no version') + + @property + def prefixlen(self): + return self._prefixlen + + def address_exclude(self, other): + """Remove an address from a larger block. + + For example: + + addr1 = IPNetwork('10.1.1.0/24') + addr2 = IPNetwork('10.1.1.0/26') + addr1.address_exclude(addr2) = + [IPNetwork('10.1.1.64/26'), IPNetwork('10.1.1.128/25')] + + or IPv6: + + addr1 = IPNetwork('::1/32') + addr2 = IPNetwork('::1/128') + addr1.address_exclude(addr2) = [IPNetwork('::0/128'), + IPNetwork('::2/127'), + IPNetwork('::4/126'), + IPNetwork('::8/125'), + ... + IPNetwork('0:0:8000::/33')] + + Args: + other: An IPvXNetwork object of the same type. + + Returns: + A sorted list of IPvXNetwork objects addresses which is self + minus other. + + Raises: + TypeError: If self and other are of difffering address + versions, or if other is not a network object. + ValueError: If other is not completely contained by self. + + """ + if not self._version == other._version: + raise TypeError("%s and %s are not of the same version" % ( + str(self), str(other))) + + if not isinstance(other, _BaseNet): + raise TypeError("%s is not a network object" % str(other)) + + if other not in self: + raise ValueError('%s not contained in %s' % (str(other), + str(self))) + if other == self: + return [] + + ret_addrs = [] + + # Make sure we're comparing the network of other. + other = IPNetwork('%s/%s' % (str(other.network), str(other.prefixlen)), + version=other._version) + + s1, s2 = self.subnet() + while s1 != other and s2 != other: + if other in s1: + ret_addrs.append(s2) + s1, s2 = s1.subnet() + elif other in s2: + ret_addrs.append(s1) + s1, s2 = s2.subnet() + else: + # If we got here, there's a bug somewhere. + assert True == False, ('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (str(s1), str(s2), str(other))) + if s1 == other: + ret_addrs.append(s2) + elif s2 == other: + ret_addrs.append(s1) + else: + # If we got here, there's a bug somewhere. 
+ assert True == False, ('Error performing exclusion: ' + 's1: %s s2: %s other: %s' % + (str(s1), str(s2), str(other))) + + return sorted(ret_addrs, key=_BaseNet._get_networks_key) + + def compare_networks(self, other): + """Compare two IP objects. + + This is only concerned about the comparison of the integer + representation of the network addresses. This means that the + host bits aren't considered at all in this method. If you want + to compare host bits, you can easily enough do a + 'HostA._ip < HostB._ip' + + Args: + other: An IP object. + + Returns: + If the IP versions of self and other are the same, returns: + + -1 if self < other: + eg: IPv4('1.1.1.0/24') < IPv4('1.1.2.0/24') + IPv6('1080::200C:417A') < IPv6('1080::200B:417B') + 0 if self == other + eg: IPv4('1.1.1.1/24') == IPv4('1.1.1.2/24') + IPv6('1080::200C:417A/96') == IPv6('1080::200C:417B/96') + 1 if self > other + eg: IPv4('1.1.1.0/24') > IPv4('1.1.0.0/24') + IPv6('1080::1:200C:417A/112') > + IPv6('1080::0:200C:417A/112') + + If the IP versions of self and other are different, returns: + + -1 if self._version < other._version + eg: IPv4('10.0.0.1/24') < IPv6('::1/128') + 1 if self._version > other._version + eg: IPv6('::1/128') > IPv4('255.255.255.0/24') + + """ + if self._version < other._version: + return -1 + if self._version > other._version: + return 1 + # self._version == other._version below here: + if self.network < other.network: + return -1 + if self.network > other.network: + return 1 + # self.network == other.network below here: + if self.netmask < other.netmask: + return -1 + if self.netmask > other.netmask: + return 1 + # self.network == other.network and self.netmask == other.netmask + return 0 + + def _get_networks_key(self): + """Network-only key function. + + Returns an object that identifies this address' network and + netmask. This function is a suitable "key" argument for sorted() + and list.sort(). + + """ + return (self._version, self.network, self.netmask) + + def _ip_int_from_prefix(self, prefixlen=None): + """Turn the prefix length netmask into a int for comparison. + + Args: + prefixlen: An integer, the prefix length. + + Returns: + An integer. + + """ + if not prefixlen and prefixlen != 0: + prefixlen = self._prefixlen + return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen) + + def _prefix_from_ip_int(self, ip_int, mask=32): + """Return prefix length from the decimal netmask. + + Args: + ip_int: An integer, the IP address. + mask: The netmask. Defaults to 32. + + Returns: + An integer, the prefix length. + + """ + while mask: + if ip_int & 1 == 1: + break + ip_int >>= 1 + mask -= 1 + + return mask + + def _ip_string_from_prefix(self, prefixlen=None): + """Turn a prefix length into a dotted decimal string. + + Args: + prefixlen: An integer, the netmask prefix length. + + Returns: + A string, the dotted decimal netmask string. + + """ + if not prefixlen: + prefixlen = self._prefixlen + return self._string_from_ip_int(self._ip_int_from_prefix(prefixlen)) + + def iter_subnets(self, prefixlen_diff=1, new_prefix=None): + """The subnets which join to make the current subnet. + + In the case that self contains only one IP + (self._prefixlen == 32 for IPv4 or self._prefixlen == 128 + for IPv6), return a list with just ourself. + + Args: + prefixlen_diff: An integer, the amount the prefix length + should be increased by. This should not be set if + new_prefix is also set. + new_prefix: The desired new prefix length. This must be a + larger number (smaller prefix) than the existing prefix. 
+ This should not be set if prefixlen_diff is also set. + + Returns: + An iterator of IPv(4|6) objects. + + Raises: + ValueError: The prefixlen_diff is too small or too large. + OR + prefixlen_diff and new_prefix are both set or new_prefix + is a smaller number than the current prefix (smaller + number means a larger network) + + """ + if self._prefixlen == self._max_prefixlen: + yield self + return + + if new_prefix is not None: + if new_prefix < self._prefixlen: + raise ValueError('new prefix must be longer') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = new_prefix - self._prefixlen + + if prefixlen_diff < 0: + raise ValueError('prefix length diff must be > 0') + new_prefixlen = self._prefixlen + prefixlen_diff + + if not self._is_valid_netmask(str(new_prefixlen)): + raise ValueError( + 'prefix length diff %d is invalid for netblock %s' % ( + new_prefixlen, str(self))) + + first = IPNetwork('%s/%s' % (str(self.network), + str(self._prefixlen + prefixlen_diff)), + version=self._version) + + yield first + current = first + while True: + broadcast = current.broadcast + if broadcast == self.broadcast: + return + new_addr = IPAddress(int(broadcast) + 1, version=self._version) + current = IPNetwork('%s/%s' % (str(new_addr), str(new_prefixlen)), + version=self._version) + + yield current + + def masked(self): + """Return the network object with the host bits masked out.""" + return IPNetwork('%s/%d' % (self.network, self._prefixlen), + version=self._version) + + def subnet(self, prefixlen_diff=1, new_prefix=None): + """Return a list of subnets, rather than an iterator.""" + return list(self.iter_subnets(prefixlen_diff, new_prefix)) + + def supernet(self, prefixlen_diff=1, new_prefix=None): + """The supernet containing the current network. + + Args: + prefixlen_diff: An integer, the amount the prefix length of + the network should be decreased by. For example, given a + /24 network and a prefixlen_diff of 3, a supernet with a + /21 netmask is returned. + + Returns: + An IPv4 network object. + + Raises: + ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have a + negative prefix length. + OR + If prefixlen_diff and new_prefix are both set or new_prefix is a + larger number than the current prefix (larger number means a + smaller network) + + """ + if self._prefixlen == 0: + return self + + if new_prefix is not None: + if new_prefix > self._prefixlen: + raise ValueError('new prefix must be shorter') + if prefixlen_diff != 1: + raise ValueError('cannot set prefixlen_diff and new_prefix') + prefixlen_diff = self._prefixlen - new_prefix + + + if self.prefixlen - prefixlen_diff < 0: + raise ValueError( + 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % + (self.prefixlen, prefixlen_diff)) + return IPNetwork('%s/%s' % (str(self.network), + str(self.prefixlen - prefixlen_diff)), + version=self._version) + + # backwards compatibility + Subnet = subnet + Supernet = supernet + AddressExclude = address_exclude + CompareNetworks = compare_networks + Contains = __contains__ + + +class _BaseV4(object): + + """Base IPv4 object. + + The following methods are used by IPv4 objects in both single IP + addresses and networks. + + """ + + # Equivalent to 255.255.255.255 or 32 bits of 1's. 
+ _ALL_ONES = (2**IPV4LENGTH) - 1 + _DECIMAL_DIGITS = frozenset('0123456789') + + def __init__(self, address): + self._version = 4 + self._max_prefixlen = IPV4LENGTH + + def _explode_shorthand_ip_string(self): + return str(self) + + def _ip_int_from_string(self, ip_str): + """Turn the given IP string into an integer for comparison. + + Args: + ip_str: A string, the IP ip_str. + + Returns: + The IP ip_str as an integer. + + Raises: + AddressValueError: if ip_str isn't a valid IPv4 Address. + + """ + octets = ip_str.split('.') + if len(octets) != 4: + raise AddressValueError(ip_str) + + packed_ip = 0 + for oc in octets: + try: + packed_ip = (packed_ip << 8) | self._parse_octet(oc) + except ValueError: + raise AddressValueError(ip_str) + return packed_ip + + def _parse_octet(self, octet_str): + """Convert a decimal octet into an integer. + + Args: + octet_str: A string, the number to parse. + + Returns: + The octet as an integer. + + Raises: + ValueError: if the octet isn't strictly a decimal from [0..255]. + + """ + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not self._DECIMAL_DIGITS.issuperset(octet_str): + raise ValueError + octet_int = int(octet_str, 10) + # Disallow leading zeroes, because no clear standard exists on + # whether these should be interpreted as decimal or octal. + if octet_int > 255 or (octet_str[0] == '0' and len(octet_str) > 1): + raise ValueError + return octet_int + + def _string_from_ip_int(self, ip_int): + """Turns a 32-bit integer into dotted decimal notation. + + Args: + ip_int: An integer, the IP address. + + Returns: + The IP address as a string in dotted decimal notation. + + """ + octets = [] + for _ in xrange(4): + octets.insert(0, str(ip_int & 0xFF)) + ip_int >>= 8 + return '.'.join(octets) + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def packed(self): + """The binary representation of this address.""" + return v4_int_to_packed(self._ip) + + @property + def version(self): + return self._version + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within the + reserved IPv4 Network range. + + """ + return self in IPv4Network('240.0.0.0/4') + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per RFC 1918. + + """ + return (self in IPv4Network('10.0.0.0/8') or + self in IPv4Network('172.16.0.0/12') or + self in IPv4Network('192.168.0.0/16')) + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is multicast. + See RFC 3171 for details. + + """ + return self in IPv4Network('224.0.0.0/4') + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 5735 3. + + """ + return self in IPv4Network('0.0.0.0') + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback per RFC 3330. + + """ + return self in IPv4Network('127.0.0.0/8') + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is link-local per RFC 3927. 
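+
+        Example:
+            >>> IPv4Address('169.254.10.20').is_link_local
+            True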
+ + """ + return self in IPv4Network('169.254.0.0/16') + + +class IPv4Address(_BaseV4, _BaseIP): + + """Represent and manipulate single IPv4 Addresses.""" + + def __init__(self, address): + + """ + Args: + address: A string or integer representing the IP + '192.168.1.1' + + Additionally, an integer can be passed, so + IPv4Address('192.168.1.1') == IPv4Address(3232235777). + or, more generally + IPv4Address(int(IPv4Address('192.168.1.1'))) == + IPv4Address('192.168.1.1') + + Raises: + AddressValueError: If ipaddr isn't a valid IPv4 address. + + """ + _BaseV4.__init__(self, address) + + # Efficient constructor from integer. + if isinstance(address, (int, long)): + self._ip = address + if address < 0 or address > self._ALL_ONES: + raise AddressValueError(address) + return + + # Constructing from a packed address + if isinstance(address, Bytes): + try: + self._ip, = struct.unpack('!I', address) + except struct.error: + raise AddressValueError(address) # Wrong length. + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = str(address) + self._ip = self._ip_int_from_string(addr_str) + + +class IPv4Network(_BaseV4, _BaseNet): + + """This class represents and manipulates 32-bit IPv4 networks. + + Attributes: [examples for IPv4Network('1.2.3.4/27')] + ._ip: 16909060 + .ip: IPv4Address('1.2.3.4') + .network: IPv4Address('1.2.3.0') + .hostmask: IPv4Address('0.0.0.31') + .broadcast: IPv4Address('1.2.3.31') + .netmask: IPv4Address('255.255.255.224') + .prefixlen: 27 + + """ + + # the valid octets for host and netmasks. only useful for IPv4. + _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0)) + + def __init__(self, address, strict=False): + """Instantiate a new IPv4 network object. + + Args: + address: A string or integer representing the IP [& network]. + '192.168.1.1/24' + '192.168.1.1/255.255.255.0' + '192.168.1.1/0.0.0.255' + are all functionally the same in IPv4. Similarly, + '192.168.1.1' + '192.168.1.1/255.255.255.255' + '192.168.1.1/32' + are also functionaly equivalent. That is to say, failing to + provide a subnetmask will create an object with a mask of /32. + + If the mask (portion after the / in the argument) is given in + dotted quad form, it is treated as a netmask if it starts with a + non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it + starts with a zero field (e.g. 0.255.255.255 == /8), with the + single exception of an all-zero mask which is treated as a + netmask == /0. If no mask is given, a default of /32 is used. + + Additionally, an integer can be passed, so + IPv4Network('192.168.1.1') == IPv4Network(3232235777). + or, more generally + IPv4Network(int(IPv4Network('192.168.1.1'))) == + IPv4Network('192.168.1.1') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 192.168.1.0/24 and not an + IP address on a network, eg, 192.168.1.1/24. + + Raises: + AddressValueError: If ipaddr isn't a valid IPv4 address. + NetmaskValueError: If the netmask isn't valid for + an IPv4 address. + ValueError: If strict was True and a network address was not + supplied. + + """ + _BaseNet.__init__(self, address) + _BaseV4.__init__(self, address) + + # Constructing from an integer or packed bytes. 
+ if isinstance(address, (int, long, Bytes)): + self.ip = IPv4Address(address) + self._ip = self.ip._ip + self._prefixlen = self._max_prefixlen + self.netmask = IPv4Address(self._ALL_ONES) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = str(address).split('/') + + if len(addr) > 2: + raise AddressValueError(address) + + self._ip = self._ip_int_from_string(addr[0]) + self.ip = IPv4Address(self._ip) + + if len(addr) == 2: + mask = addr[1].split('.') + if len(mask) == 4: + # We have dotted decimal netmask. + if self._is_valid_netmask(addr[1]): + self.netmask = IPv4Address(self._ip_int_from_string( + addr[1])) + elif self._is_hostmask(addr[1]): + self.netmask = IPv4Address( + self._ip_int_from_string(addr[1]) ^ self._ALL_ONES) + else: + raise NetmaskValueError('%s is not a valid netmask' + % addr[1]) + + self._prefixlen = self._prefix_from_ip_int(int(self.netmask)) + else: + # We have a netmask in prefix length form. + if not self._is_valid_netmask(addr[1]): + raise NetmaskValueError(addr[1]) + self._prefixlen = int(addr[1]) + self.netmask = IPv4Address(self._ip_int_from_prefix( + self._prefixlen)) + else: + self._prefixlen = self._max_prefixlen + self.netmask = IPv4Address(self._ip_int_from_prefix( + self._prefixlen)) + if strict: + if self.ip != self.network: + raise ValueError('%s has host bits set' % + self.ip) + if self._prefixlen == (self._max_prefixlen - 1): + self.iterhosts = self.__iter__ + + def _is_hostmask(self, ip_str): + """Test if the IP string is a hostmask (rather than a netmask). + + Args: + ip_str: A string, the potential hostmask. + + Returns: + A boolean, True if the IP string is a hostmask. + + """ + bits = ip_str.split('.') + try: + parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] + except ValueError: + return False + if len(parts) != len(bits): + return False + if parts[0] < parts[-1]: + return True + return False + + def _is_valid_netmask(self, netmask): + """Verify that the netmask is valid. + + Args: + netmask: A string, either a prefix or dotted decimal + netmask. + + Returns: + A boolean, True if the prefix represents a valid IPv4 + netmask. + + """ + mask = netmask.split('.') + if len(mask) == 4: + if [x for x in mask if int(x) not in self._valid_mask_octets]: + return False + if [y for idx, y in enumerate(mask) if idx > 0 and + y > mask[idx - 1]]: + return False + return True + try: + netmask = int(netmask) + except ValueError: + return False + return 0 <= netmask <= self._max_prefixlen + + # backwards compatibility + IsRFC1918 = lambda self: self.is_private + IsMulticast = lambda self: self.is_multicast + IsLoopback = lambda self: self.is_loopback + IsLinkLocal = lambda self: self.is_link_local + + +class _BaseV6(object): + + """Base IPv6 object. + + The following methods are used by IPv6 objects in both single IP + addresses and networks. + + """ + + _ALL_ONES = (2**IPV6LENGTH) - 1 + _HEXTET_COUNT = 8 + _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') + + def __init__(self, address): + self._version = 6 + self._max_prefixlen = IPV6LENGTH + + def _ip_int_from_string(self, ip_str): + """Turn an IPv6 ip_str into an integer. + + Args: + ip_str: A string, the IPv6 ip_str. + + Returns: + A long, the IPv6 ip_str. + + Raises: + AddressValueError: if ip_str isn't a valid IPv6 Address. + + """ + parts = ip_str.split(':') + + # An IPv6 address needs at least 2 colons (3 parts). 
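+        # e.g. '::1'.split(':') gives ['', '', '1']; a bare '1' fails here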
+ if len(parts) < 3: + raise AddressValueError(ip_str) + + # If the address has an IPv4-style suffix, convert it to hexadecimal. + if '.' in parts[-1]: + ipv4_int = IPv4Address(parts.pop())._ip + parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF)) + parts.append('%x' % (ipv4_int & 0xFFFF)) + + # An IPv6 address can't have more than 8 colons (9 parts). + if len(parts) > self._HEXTET_COUNT + 1: + raise AddressValueError(ip_str) + + # Disregarding the endpoints, find '::' with nothing in between. + # This indicates that a run of zeroes has been skipped. + try: + skip_index, = ( + [i for i in xrange(1, len(parts) - 1) if not parts[i]] or + [None]) + except ValueError: + # Can't have more than one '::' + raise AddressValueError(ip_str) + + # parts_hi is the number of parts to copy from above/before the '::' + # parts_lo is the number of parts to copy from below/after the '::' + if skip_index is not None: + # If we found a '::', then check if it also covers the endpoints. + parts_hi = skip_index + parts_lo = len(parts) - skip_index - 1 + if not parts[0]: + parts_hi -= 1 + if parts_hi: + raise AddressValueError(ip_str) # ^: requires ^:: + if not parts[-1]: + parts_lo -= 1 + if parts_lo: + raise AddressValueError(ip_str) # :$ requires ::$ + parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo) + if parts_skipped < 1: + raise AddressValueError(ip_str) + else: + # Otherwise, allocate the entire address to parts_hi. The endpoints + # could still be empty, but _parse_hextet() will check for that. + if len(parts) != self._HEXTET_COUNT: + raise AddressValueError(ip_str) + parts_hi = len(parts) + parts_lo = 0 + parts_skipped = 0 + + try: + # Now, parse the hextets into a 128-bit integer. + ip_int = 0L + for i in xrange(parts_hi): + ip_int <<= 16 + ip_int |= self._parse_hextet(parts[i]) + ip_int <<= 16 * parts_skipped + for i in xrange(-parts_lo, 0): + ip_int <<= 16 + ip_int |= self._parse_hextet(parts[i]) + return ip_int + except ValueError: + raise AddressValueError(ip_str) + + def _parse_hextet(self, hextet_str): + """Convert an IPv6 hextet string into an integer. + + Args: + hextet_str: A string, the number to parse. + + Returns: + The hextet as an integer. + + Raises: + ValueError: if the input isn't strictly a hex number from [0..FFFF]. + + """ + # Whitelist the characters, since int() allows a lot of bizarre stuff. + if not self._HEX_DIGITS.issuperset(hextet_str): + raise ValueError + hextet_int = int(hextet_str, 16) + if hextet_int > 0xFFFF: + raise ValueError + return hextet_int + + def _compress_hextets(self, hextets): + """Compresses a list of hextets. + + Compresses a list of strings, replacing the longest continuous + sequence of "0" in the list with "" and adding empty strings at + the beginning or at the end of the string such that subsequently + calling ":".join(hextets) will produce the compressed version of + the IPv6 address. + + Args: + hextets: A list of strings, the hextets to compress. + + Returns: + A list of strings. + + """ + best_doublecolon_start = -1 + best_doublecolon_len = 0 + doublecolon_start = -1 + doublecolon_len = 0 + for index in range(len(hextets)): + if hextets[index] == '0': + doublecolon_len += 1 + if doublecolon_start == -1: + # Start of a sequence of zeros. + doublecolon_start = index + if doublecolon_len > best_doublecolon_len: + # This is the longest sequence of zeros so far. 
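+                    # the strictly-greater test means ties keep the
+                    # earliest (leftmost) run of zeros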
+ best_doublecolon_len = doublecolon_len + best_doublecolon_start = doublecolon_start + else: + doublecolon_len = 0 + doublecolon_start = -1 + + if best_doublecolon_len > 1: + best_doublecolon_end = (best_doublecolon_start + + best_doublecolon_len) + # For zeros at the end of the address. + if best_doublecolon_end == len(hextets): + hextets += [''] + hextets[best_doublecolon_start:best_doublecolon_end] = [''] + # For zeros at the beginning of the address. + if best_doublecolon_start == 0: + hextets = [''] + hextets + + return hextets + + def _string_from_ip_int(self, ip_int=None): + """Turns a 128-bit integer into hexadecimal notation. + + Args: + ip_int: An integer, the IP address. + + Returns: + A string, the hexadecimal representation of the address. + + Raises: + ValueError: The address is bigger than 128 bits of all ones. + + """ + if not ip_int and ip_int != 0: + ip_int = int(self._ip) + + if ip_int > self._ALL_ONES: + raise ValueError('IPv6 address is too large') + + hex_str = '%032x' % ip_int + hextets = [] + for x in range(0, 32, 4): + hextets.append('%x' % int(hex_str[x:x+4], 16)) + + hextets = self._compress_hextets(hextets) + return ':'.join(hextets) + + def _explode_shorthand_ip_string(self): + """Expand a shortened IPv6 address. + + Args: + ip_str: A string, the IPv6 address. + + Returns: + A string, the expanded IPv6 address. + + """ + if isinstance(self, _BaseNet): + ip_str = str(self.ip) + else: + ip_str = str(self) + + ip_int = self._ip_int_from_string(ip_str) + parts = [] + for i in xrange(self._HEXTET_COUNT): + parts.append('%04x' % (ip_int & 0xFFFF)) + ip_int >>= 16 + parts.reverse() + if isinstance(self, _BaseNet): + return '%s/%d' % (':'.join(parts), self.prefixlen) + return ':'.join(parts) + + @property + def max_prefixlen(self): + return self._max_prefixlen + + @property + def packed(self): + """The binary representation of this address.""" + return v6_int_to_packed(self._ip) + + @property + def version(self): + return self._version + + @property + def is_multicast(self): + """Test if the address is reserved for multicast use. + + Returns: + A boolean, True if the address is a multicast address. + See RFC 2373 2.7 for details. + + """ + return self in IPv6Network('ff00::/8') + + @property + def is_reserved(self): + """Test if the address is otherwise IETF reserved. + + Returns: + A boolean, True if the address is within one of the + reserved IPv6 Network ranges. + + """ + return (self in IPv6Network('::/8') or + self in IPv6Network('100::/8') or + self in IPv6Network('200::/7') or + self in IPv6Network('400::/6') or + self in IPv6Network('800::/5') or + self in IPv6Network('1000::/4') or + self in IPv6Network('4000::/3') or + self in IPv6Network('6000::/3') or + self in IPv6Network('8000::/3') or + self in IPv6Network('A000::/3') or + self in IPv6Network('C000::/3') or + self in IPv6Network('E000::/4') or + self in IPv6Network('F000::/5') or + self in IPv6Network('F800::/6') or + self in IPv6Network('FE00::/9')) + + @property + def is_unspecified(self): + """Test if the address is unspecified. + + Returns: + A boolean, True if this is the unspecified address as defined in + RFC 2373 2.5.2. + + """ + return self._ip == 0 and getattr(self, '_prefixlen', 128) == 128 + + @property + def is_loopback(self): + """Test if the address is a loopback address. + + Returns: + A boolean, True if the address is a loopback address as defined in + RFC 2373 2.5.3. 
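+
+        Example:
+            >>> IPv6Address('::1').is_loopback
+            True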
+ + """ + return self._ip == 1 and getattr(self, '_prefixlen', 128) == 128 + + @property + def is_link_local(self): + """Test if the address is reserved for link-local. + + Returns: + A boolean, True if the address is reserved per RFC 4291. + + """ + return self in IPv6Network('fe80::/10') + + @property + def is_site_local(self): + """Test if the address is reserved for site-local. + + Note that the site-local address space has been deprecated by RFC 3879. + Use is_private to test if this address is in the space of unique local + addresses as defined by RFC 4193. + + Returns: + A boolean, True if the address is reserved per RFC 3513 2.5.6. + + """ + return self in IPv6Network('fec0::/10') + + @property + def is_private(self): + """Test if this address is allocated for private networks. + + Returns: + A boolean, True if the address is reserved per RFC 4193. + + """ + return self in IPv6Network('fc00::/7') + + @property + def ipv4_mapped(self): + """Return the IPv4 mapped address. + + Returns: + If the IPv6 address is a v4 mapped address, return the + IPv4 mapped address. Return None otherwise. + + """ + if (self._ip >> 32) != 0xFFFF: + return None + return IPv4Address(self._ip & 0xFFFFFFFF) + + @property + def teredo(self): + """Tuple of embedded teredo IPs. + + Returns: + Tuple of the (server, client) IPs or None if the address + doesn't appear to be a teredo address (doesn't start with + 2001::/32) + + """ + if (self._ip >> 96) != 0x20010000: + return None + return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), + IPv4Address(~self._ip & 0xFFFFFFFF)) + + @property + def sixtofour(self): + """Return the IPv4 6to4 embedded address. + + Returns: + The IPv4 6to4-embedded address if present or None if the + address doesn't appear to contain a 6to4 embedded address. + + """ + if (self._ip >> 112) != 0x2002: + return None + return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) + + +class IPv6Address(_BaseV6, _BaseIP): + + """Represent and manipulate single IPv6 Addresses. + """ + + def __init__(self, address): + """Instantiate a new IPv6 address object. + + Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv6Address('2001:4860::') == + IPv6Address(42541956101370907050197289607612071936L). + or, more generally + IPv6Address(IPv6Address('2001:4860::')._ip) == + IPv6Address('2001:4860::') + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + + """ + _BaseV6.__init__(self, address) + + # Efficient constructor from integer. + if isinstance(address, (int, long)): + self._ip = address + if address < 0 or address > self._ALL_ONES: + raise AddressValueError(address) + return + + # Constructing from a packed address + if isinstance(address, Bytes): + try: + hi, lo = struct.unpack('!QQ', address) + except struct.error: + raise AddressValueError(address) # Wrong length. + self._ip = (hi << 64) | lo + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP string. + addr_str = str(address) + if not addr_str: + raise AddressValueError('') + + self._ip = self._ip_int_from_string(addr_str) + + +class IPv6Network(_BaseV6, _BaseNet): + + """This class represents and manipulates 128-bit IPv6 networks. 
+ + Attributes: [examples for IPv6('2001:658:22A:CAFE:200::1/64')] + .ip: IPv6Address('2001:658:22a:cafe:200::1') + .network: IPv6Address('2001:658:22a:cafe::') + .hostmask: IPv6Address('::ffff:ffff:ffff:ffff') + .broadcast: IPv6Address('2001:658:22a:cafe:ffff:ffff:ffff:ffff') + .netmask: IPv6Address('ffff:ffff:ffff:ffff::') + .prefixlen: 64 + + """ + + + def __init__(self, address, strict=False): + """Instantiate a new IPv6 Network object. + + Args: + address: A string or integer representing the IPv6 network or the IP + and prefix/netmask. + '2001:4860::/128' + '2001:4860:0000:0000:0000:0000:0000:0000/128' + '2001:4860::' + are all functionally the same in IPv6. That is to say, + failing to provide a subnetmask will create an object with + a mask of /128. + + Additionally, an integer can be passed, so + IPv6Network('2001:4860::') == + IPv6Network(42541956101370907050197289607612071936L). + or, more generally + IPv6Network(IPv6Network('2001:4860::')._ip) == + IPv6Network('2001:4860::') + + strict: A boolean. If true, ensure that we have been passed + A true network address, eg, 192.168.1.0/24 and not an + IP address on a network, eg, 192.168.1.1/24. + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + NetmaskValueError: If the netmask isn't valid for + an IPv6 address. + ValueError: If strict was True and a network address was not + supplied. + + """ + _BaseNet.__init__(self, address) + _BaseV6.__init__(self, address) + + # Constructing from an integer or packed bytes. + if isinstance(address, (int, long, Bytes)): + self.ip = IPv6Address(address) + self._ip = self.ip._ip + self._prefixlen = self._max_prefixlen + self.netmask = IPv6Address(self._ALL_ONES) + return + + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. + addr = str(address).split('/') + + if len(addr) > 2: + raise AddressValueError(address) + + self._ip = self._ip_int_from_string(addr[0]) + self.ip = IPv6Address(self._ip) + + if len(addr) == 2: + if self._is_valid_netmask(addr[1]): + self._prefixlen = int(addr[1]) + else: + raise NetmaskValueError(addr[1]) + else: + self._prefixlen = self._max_prefixlen + + self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen)) + + if strict: + if self.ip != self.network: + raise ValueError('%s has host bits set' % + self.ip) + if self._prefixlen == (self._max_prefixlen - 1): + self.iterhosts = self.__iter__ + + def _is_valid_netmask(self, prefixlen): + """Verify that the netmask/prefixlen is valid. + + Args: + prefixlen: A string, the netmask in prefix length format. + + Returns: + A boolean, True if the prefix represents a valid IPv6 + netmask. + + """ + try: + prefixlen = int(prefixlen) + except ValueError: + return False + return 0 <= prefixlen <= self._max_prefixlen + + @property + def with_netmask(self): + return self.with_prefixlen diff --git a/plugins/Sidebar/maxminddb/reader.py b/plugins/Sidebar/maxminddb/reader.py new file mode 100644 index 00000000..5ecfbdf2 --- /dev/null +++ b/plugins/Sidebar/maxminddb/reader.py @@ -0,0 +1,221 @@ +""" +maxminddb.reader +~~~~~~~~~~~~~~~~ + +This module contains the pure Python database reader and related classes. 
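+
+A minimal usage sketch (the database path below is hypothetical):
+
+ from maxminddb.reader import Reader
+
+ reader = Reader('/path/to/GeoLite2-Country.mmdb')
+ record = reader.get('8.8.8.8') # decoded record, or None if not found
+ reader.close()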
+ +""" +from __future__ import unicode_literals + +try: + import mmap +except ImportError: + # pylint: disable=invalid-name + mmap = None + +import struct + +from maxminddb.compat import byte_from_int, int_from_byte, ipaddress +from maxminddb.const import MODE_AUTO, MODE_MMAP, MODE_FILE, MODE_MEMORY +from maxminddb.decoder import Decoder +from maxminddb.errors import InvalidDatabaseError +from maxminddb.file import FileBuffer + + +class Reader(object): + + """ + Instances of this class provide a reader for the MaxMind DB format. IP + addresses can be looked up using the ``get`` method. + """ + + _DATA_SECTION_SEPARATOR_SIZE = 16 + _METADATA_START_MARKER = b"\xAB\xCD\xEFMaxMind.com" + + _ipv4_start = None + + def __init__(self, database, mode=MODE_AUTO): + """Reader for the MaxMind DB file format + + Arguments: + database -- A path to a valid MaxMind DB file such as a GeoIP2 + database file. + mode -- mode to open the database with. Valid mode are: + * MODE_MMAP - read from memory map. + * MODE_FILE - read database as standard file. + * MODE_MEMORY - load database into memory. + * MODE_AUTO - tries MODE_MMAP and then MODE_FILE. Default. + """ + if (mode == MODE_AUTO and mmap) or mode == MODE_MMAP: + with open(database, 'rb') as db_file: + self._buffer = mmap.mmap( + db_file.fileno(), 0, access=mmap.ACCESS_READ) + self._buffer_size = self._buffer.size() + elif mode in (MODE_AUTO, MODE_FILE): + self._buffer = FileBuffer(database) + self._buffer_size = self._buffer.size() + elif mode == MODE_MEMORY: + with open(database, 'rb') as db_file: + self._buffer = db_file.read() + self._buffer_size = len(self._buffer) + else: + raise ValueError('Unsupported open mode ({0}). Only MODE_AUTO, ' + ' MODE_FILE, and MODE_MEMORY are support by the pure Python ' + 'Reader'.format(mode)) + + metadata_start = self._buffer.rfind(self._METADATA_START_MARKER, + max(0, self._buffer_size + - 128 * 1024)) + + if metadata_start == -1: + self.close() + raise InvalidDatabaseError('Error opening database file ({0}). ' + 'Is this a valid MaxMind DB file?' + ''.format(database)) + + metadata_start += len(self._METADATA_START_MARKER) + metadata_decoder = Decoder(self._buffer, metadata_start) + (metadata, _) = metadata_decoder.decode(metadata_start) + self._metadata = Metadata( + **metadata) # pylint: disable=bad-option-value + + self._decoder = Decoder(self._buffer, self._metadata.search_tree_size + + self._DATA_SECTION_SEPARATOR_SIZE) + + def metadata(self): + """Return the metadata associated with the MaxMind DB file""" + return self._metadata + + def get(self, ip_address): + """Return the record for the ip_address in the MaxMind DB + + + Arguments: + ip_address -- an IP address in the standard string notation + """ + address = ipaddress.ip_address(ip_address) + + if address.version == 6 and self._metadata.ip_version == 4: + raise ValueError('Error looking up {0}. 
You attempted to look up ' + 'an IPv6 address in an IPv4-only database.'.format( + ip_address)) + pointer = self._find_address_in_tree(address) + + return self._resolve_data_pointer(pointer) if pointer else None + + def _find_address_in_tree(self, ip_address): + packed = ip_address.packed + + bit_count = len(packed) * 8 + node = self._start_node(bit_count) + + for i in range(bit_count): + if node >= self._metadata.node_count: + break + bit = 1 & (int_from_byte(packed[i >> 3]) >> 7 - (i % 8)) + node = self._read_node(node, bit) + if node == self._metadata.node_count: + # Record is empty + return 0 + elif node > self._metadata.node_count: + return node + + raise InvalidDatabaseError('Invalid node in search tree') + + def _start_node(self, length): + if self._metadata.ip_version != 6 or length == 128: + return 0 + + # We are looking up an IPv4 address in an IPv6 tree. Skip over the + # first 96 nodes. + if self._ipv4_start: + return self._ipv4_start + + node = 0 + for _ in range(96): + if node >= self._metadata.node_count: + break + node = self._read_node(node, 0) + self._ipv4_start = node + return node + + def _read_node(self, node_number, index): + base_offset = node_number * self._metadata.node_byte_size + + record_size = self._metadata.record_size + if record_size == 24: + offset = base_offset + index * 3 + node_bytes = b'\x00' + self._buffer[offset:offset + 3] + elif record_size == 28: + (middle,) = struct.unpack( + b'!B', self._buffer[base_offset + 3:base_offset + 4]) + if index: + middle &= 0x0F + else: + middle = (0xF0 & middle) >> 4 + offset = base_offset + index * 4 + node_bytes = byte_from_int( + middle) + self._buffer[offset:offset + 3] + elif record_size == 32: + offset = base_offset + index * 4 + node_bytes = self._buffer[offset:offset + 4] + else: + raise InvalidDatabaseError( + 'Unknown record size: {0}'.format(record_size)) + return struct.unpack(b'!I', node_bytes)[0] + + def _resolve_data_pointer(self, pointer): + resolved = pointer - self._metadata.node_count + \ + self._metadata.search_tree_size + + if resolved > self._buffer_size: + raise InvalidDatabaseError( + "The MaxMind DB file's search tree is corrupt") + + (data, _) = self._decoder.decode(resolved) + return data + + def close(self): + """Closes the MaxMind DB file and returns the resources to the system""" + # pylint: disable=unidiomatic-typecheck + if type(self._buffer) not in (str, bytes): + self._buffer.close() + + +class Metadata(object): + + """Metadata for the MaxMind DB reader""" + + # pylint: disable=too-many-instance-attributes + def __init__(self, **kwargs): + """Creates new Metadata object. 
kwargs are key/value pairs from spec""" + # Although I could just update __dict__, that is less obvious and it + # doesn't work well with static analysis tools and some IDEs + self.node_count = kwargs['node_count'] + self.record_size = kwargs['record_size'] + self.ip_version = kwargs['ip_version'] + self.database_type = kwargs['database_type'] + self.languages = kwargs['languages'] + self.binary_format_major_version = kwargs[ + 'binary_format_major_version'] + self.binary_format_minor_version = kwargs[ + 'binary_format_minor_version'] + self.build_epoch = kwargs['build_epoch'] + self.description = kwargs['description'] + + @property + def node_byte_size(self): + """The size of a node in bytes""" + return self.record_size // 4 + + @property + def search_tree_size(self): + """The size of the search tree""" + return self.node_count * self.node_byte_size + + def __repr__(self): + args = ', '.join('%s=%r' % x for x in self.__dict__.items()) + return '{module}.{class_name}({data})'.format( + module=self.__module__, + class_name=self.__class__.__name__, + data=args) diff --git a/plugins/Sidebar/media-globe/Detector.js b/plugins/Sidebar/media-globe/Detector.js new file mode 100644 index 00000000..1c074b83 --- /dev/null +++ b/plugins/Sidebar/media-globe/Detector.js @@ -0,0 +1,60 @@ +/** + * @author alteredq / http://alteredqualia.com/ + * @author mr.doob / http://mrdoob.com/ + */ + +Detector = { + + canvas : !! window.CanvasRenderingContext2D, + webgl : ( function () { try { return !! window.WebGLRenderingContext && !! document.createElement( 'canvas' ).getContext( 'experimental-webgl' ); } catch( e ) { return false; } } )(), + workers : !! window.Worker, + fileapi : window.File && window.FileReader && window.FileList && window.Blob, + + getWebGLErrorMessage : function () { + + var domElement = document.createElement( 'div' ); + + domElement.style.fontFamily = 'monospace'; + domElement.style.fontSize = '13px'; + domElement.style.textAlign = 'center'; + domElement.style.background = '#eee'; + domElement.style.color = '#000'; + domElement.style.padding = '1em'; + domElement.style.width = '475px'; + domElement.style.margin = '5em auto 0'; + + if ( ! this.webgl ) { + + domElement.innerHTML = window.WebGLRenderingContext ? [ + 'Sorry, your graphics card doesn\'t support WebGL' + ].join( '\n' ) : [ + 'Sorry, your browser doesn\'t support WebGL
    ', + 'Please try with', + 'Chrome, ', + 'Firefox 4 or', + 'Webkit Nightly (Mac)' + ].join( '\n' ); + + } + + return domElement; + + }, + + addGetWebGLMessage : function ( parameters ) { + + var parent, id, domElement; + + parameters = parameters || {}; + + parent = parameters.parent !== undefined ? parameters.parent : document.body; + id = parameters.id !== undefined ? parameters.id : 'oldie'; + + domElement = Detector.getWebGLErrorMessage(); + domElement.id = id; + + parent.appendChild( domElement ); + + } + +}; diff --git a/plugins/Sidebar/media-globe/Tween.js b/plugins/Sidebar/media-globe/Tween.js new file mode 100644 index 00000000..bdf141ad --- /dev/null +++ b/plugins/Sidebar/media-globe/Tween.js @@ -0,0 +1,12 @@ +// Tween.js - http://github.com/sole/tween.js +var TWEEN=TWEEN||function(){var a,e,c,d,f=[];return{start:function(g){c=setInterval(this.update,1E3/(g||60))},stop:function(){clearInterval(c)},add:function(g){f.push(g)},remove:function(g){a=f.indexOf(g);a!==-1&&f.splice(a,1)},update:function(){a=0;e=f.length;for(d=(new Date).getTime();a1?1:b;i=n(b);for(h in c)a[h]=e[h]+c[h]*i;l!==null&&l.call(a,i);if(b==1){m!==null&&m.call(a);k!==null&&k.start();return false}return true}};TWEEN.Easing={Linear:{},Quadratic:{},Cubic:{},Quartic:{},Quintic:{},Sinusoidal:{},Exponential:{},Circular:{},Elastic:{},Back:{},Bounce:{}};TWEEN.Easing.Linear.EaseNone=function(a){return a}; +TWEEN.Easing.Quadratic.EaseIn=function(a){return a*a};TWEEN.Easing.Quadratic.EaseOut=function(a){return-a*(a-2)};TWEEN.Easing.Quadratic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a;return-0.5*(--a*(a-2)-1)};TWEEN.Easing.Cubic.EaseIn=function(a){return a*a*a};TWEEN.Easing.Cubic.EaseOut=function(a){return--a*a*a+1};TWEEN.Easing.Cubic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a;return 0.5*((a-=2)*a*a+2)};TWEEN.Easing.Quartic.EaseIn=function(a){return a*a*a*a}; +TWEEN.Easing.Quartic.EaseOut=function(a){return-(--a*a*a*a-1)};TWEEN.Easing.Quartic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a;return-0.5*((a-=2)*a*a*a-2)};TWEEN.Easing.Quintic.EaseIn=function(a){return a*a*a*a*a};TWEEN.Easing.Quintic.EaseOut=function(a){return(a-=1)*a*a*a*a+1};TWEEN.Easing.Quintic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a*a;return 0.5*((a-=2)*a*a*a*a+2)};TWEEN.Easing.Sinusoidal.EaseIn=function(a){return-Math.cos(a*Math.PI/2)+1}; +TWEEN.Easing.Sinusoidal.EaseOut=function(a){return Math.sin(a*Math.PI/2)};TWEEN.Easing.Sinusoidal.EaseInOut=function(a){return-0.5*(Math.cos(Math.PI*a)-1)};TWEEN.Easing.Exponential.EaseIn=function(a){return a==0?0:Math.pow(2,10*(a-1))};TWEEN.Easing.Exponential.EaseOut=function(a){return a==1?1:-Math.pow(2,-10*a)+1};TWEEN.Easing.Exponential.EaseInOut=function(a){if(a==0)return 0;if(a==1)return 1;if((a*=2)<1)return 0.5*Math.pow(2,10*(a-1));return 0.5*(-Math.pow(2,-10*(a-1))+2)}; +TWEEN.Easing.Circular.EaseIn=function(a){return-(Math.sqrt(1-a*a)-1)};TWEEN.Easing.Circular.EaseOut=function(a){return Math.sqrt(1- --a*a)};TWEEN.Easing.Circular.EaseInOut=function(a){if((a/=0.5)<1)return-0.5*(Math.sqrt(1-a*a)-1);return 0.5*(Math.sqrt(1-(a-=2)*a)+1)};TWEEN.Easing.Elastic.EaseIn=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return-(c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d))}; +TWEEN.Easing.Elastic.EaseOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return c*Math.pow(2,-10*a)*Math.sin((a-e)*2*Math.PI/d)+1}; 
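+// Each easing function maps a normalized time in [0,1] to an eased progress value; Elastic and Back overshoot by design.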
+TWEEN.Easing.Elastic.EaseInOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);if((a*=2)<1)return-0.5*c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d);return c*Math.pow(2,-10*(a-=1))*Math.sin((a-e)*2*Math.PI/d)*0.5+1};TWEEN.Easing.Back.EaseIn=function(a){return a*a*(2.70158*a-1.70158)};TWEEN.Easing.Back.EaseOut=function(a){return(a-=1)*a*(2.70158*a+1.70158)+1}; +TWEEN.Easing.Back.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*(3.5949095*a-2.5949095);return 0.5*((a-=2)*a*(3.5949095*a+2.5949095)+2)};TWEEN.Easing.Bounce.EaseIn=function(a){return 1-TWEEN.Easing.Bounce.EaseOut(1-a)};TWEEN.Easing.Bounce.EaseOut=function(a){return(a/=1)<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+0.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+0.9375:7.5625*(a-=2.625/2.75)*a+0.984375}; +TWEEN.Easing.Bounce.EaseInOut=function(a){if(a<0.5)return TWEEN.Easing.Bounce.EaseIn(a*2)*0.5;return TWEEN.Easing.Bounce.EaseOut(a*2-1)*0.5+0.5}; diff --git a/plugins/Sidebar/media-globe/all.js b/plugins/Sidebar/media-globe/all.js new file mode 100644 index 00000000..6d41940f --- /dev/null +++ b/plugins/Sidebar/media-globe/all.js @@ -0,0 +1,1346 @@ + + +/* ---- plugins/Sidebar/media-globe/Detector.js ---- */ + + +/** + * @author alteredq / http://alteredqualia.com/ + * @author mr.doob / http://mrdoob.com/ + */ + +Detector = { + + canvas : !! window.CanvasRenderingContext2D, + webgl : ( function () { try { return !! window.WebGLRenderingContext && !! document.createElement( 'canvas' ).getContext( 'experimental-webgl' ); } catch( e ) { return false; } } )(), + workers : !! window.Worker, + fileapi : window.File && window.FileReader && window.FileList && window.Blob, + + getWebGLErrorMessage : function () { + + var domElement = document.createElement( 'div' ); + + domElement.style.fontFamily = 'monospace'; + domElement.style.fontSize = '13px'; + domElement.style.textAlign = 'center'; + domElement.style.background = '#eee'; + domElement.style.color = '#000'; + domElement.style.padding = '1em'; + domElement.style.width = '475px'; + domElement.style.margin = '5em auto 0'; + + if ( ! this.webgl ) { + + domElement.innerHTML = window.WebGLRenderingContext ? [ + 'Sorry, your graphics card doesn\'t support WebGL' + ].join( '\n' ) : [ + 'Sorry, your browser doesn\'t support WebGL
    ', + 'Please try with', + 'Chrome, ', + 'Firefox 4 or', + 'Webkit Nightly (Mac)' + ].join( '\n' ); + + } + + return domElement; + + }, + + addGetWebGLMessage : function ( parameters ) { + + var parent, id, domElement; + + parameters = parameters || {}; + + parent = parameters.parent !== undefined ? parameters.parent : document.body; + id = parameters.id !== undefined ? parameters.id : 'oldie'; + + domElement = Detector.getWebGLErrorMessage(); + domElement.id = id; + + parent.appendChild( domElement ); + + } + +}; + + + +/* ---- plugins/Sidebar/media-globe/Tween.js ---- */ + + +// Tween.js - http://github.com/sole/tween.js +var TWEEN=TWEEN||function(){var a,e,c,d,f=[];return{start:function(g){c=setInterval(this.update,1E3/(g||60))},stop:function(){clearInterval(c)},add:function(g){f.push(g)},remove:function(g){a=f.indexOf(g);a!==-1&&f.splice(a,1)},update:function(){a=0;e=f.length;for(d=(new Date).getTime();a1?1:b;i=n(b);for(h in c)a[h]=e[h]+c[h]*i;l!==null&&l.call(a,i);if(b==1){m!==null&&m.call(a);k!==null&&k.start();return false}return true}};TWEEN.Easing={Linear:{},Quadratic:{},Cubic:{},Quartic:{},Quintic:{},Sinusoidal:{},Exponential:{},Circular:{},Elastic:{},Back:{},Bounce:{}};TWEEN.Easing.Linear.EaseNone=function(a){return a}; +TWEEN.Easing.Quadratic.EaseIn=function(a){return a*a};TWEEN.Easing.Quadratic.EaseOut=function(a){return-a*(a-2)};TWEEN.Easing.Quadratic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a;return-0.5*(--a*(a-2)-1)};TWEEN.Easing.Cubic.EaseIn=function(a){return a*a*a};TWEEN.Easing.Cubic.EaseOut=function(a){return--a*a*a+1};TWEEN.Easing.Cubic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a;return 0.5*((a-=2)*a*a+2)};TWEEN.Easing.Quartic.EaseIn=function(a){return a*a*a*a}; +TWEEN.Easing.Quartic.EaseOut=function(a){return-(--a*a*a*a-1)};TWEEN.Easing.Quartic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a;return-0.5*((a-=2)*a*a*a-2)};TWEEN.Easing.Quintic.EaseIn=function(a){return a*a*a*a*a};TWEEN.Easing.Quintic.EaseOut=function(a){return(a-=1)*a*a*a*a+1};TWEEN.Easing.Quintic.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*a*a*a;return 0.5*((a-=2)*a*a*a*a+2)};TWEEN.Easing.Sinusoidal.EaseIn=function(a){return-Math.cos(a*Math.PI/2)+1}; +TWEEN.Easing.Sinusoidal.EaseOut=function(a){return Math.sin(a*Math.PI/2)};TWEEN.Easing.Sinusoidal.EaseInOut=function(a){return-0.5*(Math.cos(Math.PI*a)-1)};TWEEN.Easing.Exponential.EaseIn=function(a){return a==0?0:Math.pow(2,10*(a-1))};TWEEN.Easing.Exponential.EaseOut=function(a){return a==1?1:-Math.pow(2,-10*a)+1};TWEEN.Easing.Exponential.EaseInOut=function(a){if(a==0)return 0;if(a==1)return 1;if((a*=2)<1)return 0.5*Math.pow(2,10*(a-1));return 0.5*(-Math.pow(2,-10*(a-1))+2)}; +TWEEN.Easing.Circular.EaseIn=function(a){return-(Math.sqrt(1-a*a)-1)};TWEEN.Easing.Circular.EaseOut=function(a){return Math.sqrt(1- --a*a)};TWEEN.Easing.Circular.EaseInOut=function(a){if((a/=0.5)<1)return-0.5*(Math.sqrt(1-a*a)-1);return 0.5*(Math.sqrt(1-(a-=2)*a)+1)};TWEEN.Easing.Elastic.EaseIn=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return-(c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d))}; +TWEEN.Easing.Elastic.EaseOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else e=d/(2*Math.PI)*Math.asin(1/c);return c*Math.pow(2,-10*a)*Math.sin((a-e)*2*Math.PI/d)+1}; +TWEEN.Easing.Elastic.EaseInOut=function(a){var e,c=0.1,d=0.4;if(a==0)return 0;if(a==1)return 1;d||(d=0.3);if(!c||c<1){c=1;e=d/4}else 
e=d/(2*Math.PI)*Math.asin(1/c);if((a*=2)<1)return-0.5*c*Math.pow(2,10*(a-=1))*Math.sin((a-e)*2*Math.PI/d);return c*Math.pow(2,-10*(a-=1))*Math.sin((a-e)*2*Math.PI/d)*0.5+1};TWEEN.Easing.Back.EaseIn=function(a){return a*a*(2.70158*a-1.70158)};TWEEN.Easing.Back.EaseOut=function(a){return(a-=1)*a*(2.70158*a+1.70158)+1}; +TWEEN.Easing.Back.EaseInOut=function(a){if((a*=2)<1)return 0.5*a*a*(3.5949095*a-2.5949095);return 0.5*((a-=2)*a*(3.5949095*a+2.5949095)+2)};TWEEN.Easing.Bounce.EaseIn=function(a){return 1-TWEEN.Easing.Bounce.EaseOut(1-a)};TWEEN.Easing.Bounce.EaseOut=function(a){return(a/=1)<1/2.75?7.5625*a*a:a<2/2.75?7.5625*(a-=1.5/2.75)*a+0.75:a<2.5/2.75?7.5625*(a-=2.25/2.75)*a+0.9375:7.5625*(a-=2.625/2.75)*a+0.984375}; +TWEEN.Easing.Bounce.EaseInOut=function(a){if(a<0.5)return TWEEN.Easing.Bounce.EaseIn(a*2)*0.5;return TWEEN.Easing.Bounce.EaseOut(a*2-1)*0.5+0.5}; + + + +/* ---- plugins/Sidebar/media-globe/globe.js ---- */ + + +/** + * dat.globe Javascript WebGL Globe Toolkit + * http://dataarts.github.com/dat.globe + * + * Copyright 2011 Data Arts Team, Google Creative Lab + * + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ + +var DAT = DAT || {}; + +DAT.Globe = function(container, opts) { + opts = opts || {}; + + var colorFn = opts.colorFn || function(x) { + var c = new THREE.Color(); + c.setHSL( ( 0.5 - (x * 2) ), Math.max(0.8, 1.0 - (x * 3)), 0.5 ); + return c; + }; + var imgDir = opts.imgDir || '/globe/'; + + var Shaders = { + 'earth' : { + uniforms: { + 'texture': { type: 't', value: null } + }, + vertexShader: [ + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + 'vNormal = normalize( normalMatrix * normal );', + 'vUv = uv;', + '}' + ].join('\n'), + fragmentShader: [ + 'uniform sampler2D texture;', + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'vec3 diffuse = texture2D( texture, vUv ).xyz;', + 'float intensity = 1.05 - dot( vNormal, vec3( 0.0, 0.0, 1.0 ) );', + 'vec3 atmosphere = vec3( 1.0, 1.0, 1.0 ) * pow( intensity, 3.0 );', + 'gl_FragColor = vec4( diffuse + atmosphere, 1.0 );', + '}' + ].join('\n') + }, + 'atmosphere' : { + uniforms: {}, + vertexShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'vNormal = normalize( normalMatrix * normal );', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + '}' + ].join('\n'), + fragmentShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'float intensity = pow( 0.8 - dot( vNormal, vec3( 0, 0, 1.0 ) ), 12.0 );', + 'gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0 ) * intensity;', + '}' + ].join('\n') + } + }; + + var camera, scene, renderer, w, h; + var mesh, atmosphere, point, running; + + var overRenderer; + var running = true; + + var curZoomSpeed = 0; + var zoomSpeed = 50; + + var mouse = { x: 0, y: 0 }, mouseOnDown = { x: 0, y: 0 }; + var rotation = { x: 0, y: 0 }, + target = { x: Math.PI*3/2, y: Math.PI / 6.0 }, + targetOnDown = { x: 0, y: 0 }; + + var distance = 100000, distanceTarget = 100000; + var padding = 10; + var PI_HALF = Math.PI / 2; + + function init() { + + container.style.color = '#fff'; + container.style.font = '13px/20px Arial, sans-serif'; + + var shader, uniforms, material; + w = container.offsetWidth || window.innerWidth; + h = container.offsetHeight || window.innerHeight; + + 
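+ // 30-degree FOV perspective camera; aspect from the container, near/far clip planes at 1 and 10000.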
camera = new THREE.PerspectiveCamera(30, w / h, 1, 10000); + camera.position.z = distance; + + scene = new THREE.Scene(); + + var geometry = new THREE.SphereGeometry(200, 40, 30); + + shader = Shaders['earth']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + uniforms['texture'].value = THREE.ImageUtils.loadTexture(imgDir+'world.jpg'); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.rotation.y = Math.PI; + scene.add(mesh); + + shader = Shaders['atmosphere']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader, + side: THREE.BackSide, + blending: THREE.AdditiveBlending, + transparent: true + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.scale.set( 1.1, 1.1, 1.1 ); + scene.add(mesh); + + geometry = new THREE.BoxGeometry(2.75, 2.75, 1); + geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0,0,-0.5)); + + point = new THREE.Mesh(geometry); + + renderer = new THREE.WebGLRenderer({antialias: true}); + renderer.setSize(w, h); + renderer.setClearColor( 0x212121, 1 ); + + renderer.domElement.style.position = 'relative'; + + container.appendChild(renderer.domElement); + + container.addEventListener('mousedown', onMouseDown, false); + + if ('onwheel' in document) { + container.addEventListener('wheel', onMouseWheel, false); + } else { + container.addEventListener('mousewheel', onMouseWheel, false); + } + + document.addEventListener('keydown', onDocumentKeyDown, false); + + window.addEventListener('resize', onWindowResize, false); + + container.addEventListener('mouseover', function() { + overRenderer = true; + }, false); + + container.addEventListener('mouseout', function() { + overRenderer = false; + }, false); + } + + function addData(data, opts) { + var lat, lng, size, color, i, step, colorFnWrapper; + + opts.animated = opts.animated || false; + this.is_animated = opts.animated; + opts.format = opts.format || 'magnitude'; // other option is 'legend' + if (opts.format === 'magnitude') { + step = 3; + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else if (opts.format === 'legend') { + step = 4; + colorFnWrapper = function(data, i) { return colorFn(data[i+3]); } + } else if (opts.format === 'peer') { + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else { + throw('error: format not supported: '+opts.format); + } + + if (opts.animated) { + if (this._baseGeometry === undefined) { + this._baseGeometry = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; +// size = data[i + 2]; + color = colorFnWrapper(data,i); + size = 0; + addPoint(lat, lng, size, color, this._baseGeometry); + } + } + if(this._morphTargetId === undefined) { + this._morphTargetId = 0; + } else { + this._morphTargetId += 1; + } + opts.name = opts.name || 'morphTarget'+this._morphTargetId; + } + var subgeo = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; + color = colorFnWrapper(data,i); + size = data[i + 2]; + size = size*200; + addPoint(lat, lng, size, color, subgeo); + } + if (opts.animated) { + this._baseGeometry.morphTargets.push({'name': opts.name, vertices: subgeo.vertices}); + } else { + this._baseGeometry = subgeo; + } + + }; + + function 
createPoints() { + if (this._baseGeometry !== undefined) { + if (this.is_animated === false) { + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: false + })); + } else { + if (this._baseGeometry.morphTargets.length < 8) { + console.log('t l',this._baseGeometry.morphTargets.length); + var padding = 8-this._baseGeometry.morphTargets.length; + console.log('padding', padding); + for(var i=0; i<=padding; i++) { + console.log('padding',i); + this._baseGeometry.morphTargets.push({'name': 'morphPadding'+i, vertices: this._baseGeometry.vertices}); + } + } + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: true + })); + } + scene.add(this.points); + } + } + + function addPoint(lat, lng, size, color, subgeo) { + + var phi = (90 - lat) * Math.PI / 180; + var theta = (180 - lng) * Math.PI / 180; + + point.position.x = 200 * Math.sin(phi) * Math.cos(theta); + point.position.y = 200 * Math.cos(phi); + point.position.z = 200 * Math.sin(phi) * Math.sin(theta); + + point.lookAt(mesh.position); + + point.scale.z = Math.max( size, 0.1 ); // avoid non-invertible matrix + point.updateMatrix(); + + for (var i = 0; i < point.geometry.faces.length; i++) { + + point.geometry.faces[i].color = color; + + } + if(point.matrixAutoUpdate){ + point.updateMatrix(); + } + subgeo.merge(point.geometry, point.matrix); + } + + function onMouseDown(event) { + event.preventDefault(); + + container.addEventListener('mousemove', onMouseMove, false); + container.addEventListener('mouseup', onMouseUp, false); + container.addEventListener('mouseout', onMouseOut, false); + + mouseOnDown.x = - event.clientX; + mouseOnDown.y = event.clientY; + + targetOnDown.x = target.x; + targetOnDown.y = target.y; + + container.style.cursor = 'move'; + } + + function onMouseMove(event) { + mouse.x = - event.clientX; + mouse.y = event.clientY; + + var zoomDamp = distance/1000; + + target.x = targetOnDown.x + (mouse.x - mouseOnDown.x) * 0.005 * zoomDamp; + target.y = targetOnDown.y + (mouse.y - mouseOnDown.y) * 0.005 * zoomDamp; + + target.y = target.y > PI_HALF ? PI_HALF : target.y; + target.y = target.y < - PI_HALF ? - PI_HALF : target.y; + } + + function onMouseUp(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + container.style.cursor = 'auto'; + } + + function onMouseOut(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + } + + function onMouseWheel(event) { + event.preventDefault(); + if (overRenderer) { + if (event.deltaY) { + zoom(-event.deltaY * (event.deltaMode == 0 ? 1 : 50)); + } else { + zoom(event.wheelDeltaY * 0.3); + } + } + return false; + } + + function onDocumentKeyDown(event) { + switch (event.keyCode) { + case 38: + zoom(100); + event.preventDefault(); + break; + case 40: + zoom(-100); + event.preventDefault(); + break; + } + } + + function onWindowResize( event ) { + camera.aspect = container.offsetWidth / container.offsetHeight; + camera.updateProjectionMatrix(); + renderer.setSize( container.offsetWidth, container.offsetHeight ); + } + + function zoom(delta) { + distanceTarget -= delta; + distanceTarget = distanceTarget > 855 ? 
855 : distanceTarget; + distanceTarget = distanceTarget < 350 ? 350 : distanceTarget; + } + + function animate() { + if (!running) return + requestAnimationFrame(animate); + render(); + } + + function render() { + zoom(curZoomSpeed); + + rotation.x += (target.x - rotation.x) * 0.1; + rotation.y += (target.y - rotation.y) * 0.1; + distance += (distanceTarget - distance) * 0.3; + + camera.position.x = distance * Math.sin(rotation.x) * Math.cos(rotation.y); + camera.position.y = distance * Math.sin(rotation.y); + camera.position.z = distance * Math.cos(rotation.x) * Math.cos(rotation.y); + + camera.lookAt(mesh.position); + + renderer.render(scene, camera); + } + + function unload() { + running = false + container.removeEventListener('mousedown', onMouseDown, false); + container.removeEventListener('mousewheel', onMouseWheel, false); + if ('onwheel' in document) { + container.removeEventListener('wheel', onMouseWheel, false); + } else { + container.removeEventListener('mousewheel', onMouseWheel, false); + } + document.removeEventListener('keydown', onDocumentKeyDown, false); + window.removeEventListener('resize', onWindowResize, false); + + } + + init(); + this.animate = animate; + this.unload = unload; + + + this.__defineGetter__('time', function() { + return this._time || 0; + }); + + this.__defineSetter__('time', function(t) { + var validMorphs = []; + var morphDict = this.points.morphTargetDictionary; + for(var k in morphDict) { + if(k.indexOf('morphPadding') < 0) { + validMorphs.push(morphDict[k]); + } + } + validMorphs.sort(); + var l = validMorphs.length-1; + var scaledt = t*l+1; + var index = Math.floor(scaledt); + for (i=0;i= 0) { + this.points.morphTargetInfluences[lastIndex] = 1 - leftover; + } + this.points.morphTargetInfluences[index] = leftover; + this._time = t; + }); + + this.addData = addData; + this.createPoints = createPoints; + this.renderer = renderer; + this.scene = scene; + + return this; + +}; + + + + +/* ---- plugins/Sidebar/media-globe/three.min.js ---- */ + + +// threejs.org/license +'use strict';var THREE={REVISION:"69"};"object"===typeof module&&(module.exports=THREE);void 0===Math.sign&&(Math.sign=function(a){return 0>a?-1:0>16&255)/255;this.g=(a>>8&255)/255;this.b=(a&255)/255;return this},setRGB:function(a,b,c){this.r=a;this.g=b;this.b=c;return this},setHSL:function(a,b,c){if(0===b)this.r=this.g=this.b=c;else{var d=function(a,b,c){0>c&&(c+=1);1c?b:c<2/3?a+6*(b-a)*(2/3-c):a};b=.5>=c?c*(1+b):c+b-c*b;c=2*c-b;this.r=d(c,b,a+1/3);this.g=d(c,b,a);this.b=d(c,b,a-1/3)}return this},setStyle:function(a){if(/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.test(a))return a=/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.exec(a),this.r=Math.min(255,parseInt(a[1],10))/255,this.g=Math.min(255,parseInt(a[2],10))/255,this.b=Math.min(255,parseInt(a[3],10))/255,this;if(/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.test(a))return a=/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.exec(a),this.r= +Math.min(100,parseInt(a[1],10))/100,this.g=Math.min(100,parseInt(a[2],10))/100,this.b=Math.min(100,parseInt(a[3],10))/100,this;if(/^\#([0-9a-f]{6})$/i.test(a))return a=/^\#([0-9a-f]{6})$/i.exec(a),this.setHex(parseInt(a[1],16)),this;if(/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.test(a))return a=/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(a),this.setHex(parseInt(a[1]+a[1]+a[2]+a[2]+a[3]+a[3],16)),this;if(/^(\w+)$/i.test(a))return this.setHex(THREE.ColorKeywords[a]),this},copy:function(a){this.r=a.r;this.g= +a.g;this.b=a.b;return this},copyGammaToLinear:function(a){this.r=a.r*a.r;this.g=a.g*a.g;this.b=a.b*a.b;return 
this},copyLinearToGamma:function(a){this.r=Math.sqrt(a.r);this.g=Math.sqrt(a.g);this.b=Math.sqrt(a.b);return this},convertGammaToLinear:function(){var a=this.r,b=this.g,c=this.b;this.r=a*a;this.g=b*b;this.b=c*c;return this},convertLinearToGamma:function(){this.r=Math.sqrt(this.r);this.g=Math.sqrt(this.g);this.b=Math.sqrt(this.b);return this},getHex:function(){return 255*this.r<<16^255*this.g<< +8^255*this.b<<0},getHexString:function(){return("000000"+this.getHex().toString(16)).slice(-6)},getHSL:function(a){a=a||{h:0,s:0,l:0};var b=this.r,c=this.g,d=this.b,e=Math.max(b,c,d),f=Math.min(b,c,d),g,h=(f+e)/2;if(f===e)f=g=0;else{var k=e-f,f=.5>=h?k/(e+f):k/(2-e-f);switch(e){case b:g=(c-d)/k+(cf&&c>b?(c=2*Math.sqrt(1+c-f-b),this._w=(k-g)/c,this._x=.25*c,this._y=(a+e)/c,this._z=(d+h)/c):f>b?(c=2*Math.sqrt(1+f-c-b),this._w=(d-h)/c,this._x=(a+e)/c,this._y= +.25*c,this._z=(g+k)/c):(c=2*Math.sqrt(1+b-c-f),this._w=(e-a)/c,this._x=(d+h)/c,this._y=(g+k)/c,this._z=.25*c);this.onChangeCallback();return this},setFromUnitVectors:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3);b=c.dot(d)+1;1E-6>b?(b=0,Math.abs(c.x)>Math.abs(c.z)?a.set(-c.y,c.x,0):a.set(0,-c.z,c.y)):a.crossVectors(c,d);this._x=a.x;this._y=a.y;this._z=a.z;this._w=b;this.normalize();return this}}(),inverse:function(){this.conjugate().normalize();return this},conjugate:function(){this._x*= +-1;this._y*=-1;this._z*=-1;this.onChangeCallback();return this},dot:function(a){return this._x*a._x+this._y*a._y+this._z*a._z+this._w*a._w},lengthSq:function(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w},length:function(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)},normalize:function(){var a=this.length();0===a?(this._z=this._y=this._x=0,this._w=1):(a=1/a,this._x*=a,this._y*=a,this._z*=a,this._w*=a);this.onChangeCallback();return this}, +multiply:function(a,b){return void 0!==b?(console.warn("THREE.Quaternion: .multiply() now only accepts one argument. Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(a,b)):this.multiplyQuaternions(this,a)},multiplyQuaternions:function(a,b){var c=a._x,d=a._y,e=a._z,f=a._w,g=b._x,h=b._y,k=b._z,n=b._w;this._x=c*n+f*g+d*k-e*h;this._y=d*n+f*h+e*g-c*k;this._z=e*n+f*k+c*h-d*g;this._w=f*n-c*g-d*h-e*k;this.onChangeCallback();return this},multiplyVector3:function(a){console.warn("THREE.Quaternion: .multiplyVector3() has been removed. 
Use is now vector.applyQuaternion( quaternion ) instead."); +return a.applyQuaternion(this)},slerp:function(a,b){if(0===b)return this;if(1===b)return this.copy(a);var c=this._x,d=this._y,e=this._z,f=this._w,g=f*a._w+c*a._x+d*a._y+e*a._z;0>g?(this._w=-a._w,this._x=-a._x,this._y=-a._y,this._z=-a._z,g=-g):this.copy(a);if(1<=g)return this._w=f,this._x=c,this._y=d,this._z=e,this;var h=Math.acos(g),k=Math.sqrt(1-g*g);if(.001>Math.abs(k))return this._w=.5*(f+this._w),this._x=.5*(c+this._x),this._y=.5*(d+this._y),this._z=.5*(e+this._z),this;g=Math.sin((1-b)*h)/k;h= +Math.sin(b*h)/k;this._w=f*g+this._w*h;this._x=c*g+this._x*h;this._y=d*g+this._y*h;this._z=e*g+this._z*h;this.onChangeCallback();return this},equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._w===this._w},fromArray:function(a,b){void 0===b&&(b=0);this._x=a[b];this._y=a[b+1];this._z=a[b+2];this._w=a[b+3];this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._w;return a},onChange:function(a){this.onChangeCallback= +a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Quaternion(this._x,this._y,this._z,this._w)}};THREE.Quaternion.slerp=function(a,b,c,d){return c.copy(a).slerp(b,d)};THREE.Vector2=function(a,b){this.x=a||0;this.y=b||0}; +THREE.Vector2.prototype={constructor:THREE.Vector2,set:function(a,b){this.x=a;this.y=b;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;return this},add:function(a, +b){if(void 0!==b)return console.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;return this},addScalar:function(a){this.x+=a;this.y+=a;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector2: .sub() now only accepts one argument. 
Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;return this}, +subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;return this},multiply:function(a){this.x*=a.x;this.y*=a.y;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;return this},divide:function(a){this.x/=a.x;this.y/=a.y;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a):this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector2,b=new THREE.Vector2);a.set(c,c);b.set(d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);return this}, +roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);return this},negate:function(){this.x=-this.x;this.y=-this.y;return this},dot:function(a){return this.x*a.x+this.y*a.y},lengthSq:function(){return this.x*this.x+this.y*this.y},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y)},normalize:function(){return this.divideScalar(this.length())},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b= +this.x-a.x;a=this.y-a.y;return b*b+a*a},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;return a},clone:function(){return new THREE.Vector2(this.x,this.y)}}; +THREE.Vector3=function(a,b,c){this.x=a||0;this.y=b||0;this.z=c||0}; +THREE.Vector3.prototype={constructor:THREE.Vector3,set:function(a,b,c){this.x=a;this.y=b;this.z=c;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:throw Error("index is out of range: "+ +a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."), +this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;return this},multiply:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .multiply() now only accepts one argument. 
Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(a,b);this.x*=a.x;this.y*=a.y;this.z*=a.z;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;return this},multiplyVectors:function(a,b){this.x=a.x*b.x;this.y= +a.y*b.y;this.z=a.z*b.z;return this},applyEuler:function(){var a;return function(b){!1===b instanceof THREE.Euler&&console.error("THREE.Vector3: .applyEuler() now expects a Euler rotation rather than a Vector3 and order.");void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromEuler(b));return this}}(),applyAxisAngle:function(){var a;return function(b,c){void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromAxisAngle(b,c));return this}}(),applyMatrix3:function(a){var b=this.x, +c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[3]*c+a[6]*d;this.y=a[1]*b+a[4]*c+a[7]*d;this.z=a[2]*b+a[5]*c+a[8]*d;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12];this.y=a[1]*b+a[5]*c+a[9]*d+a[13];this.z=a[2]*b+a[6]*c+a[10]*d+a[14];return this},applyProjection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;var e=1/(a[3]*b+a[7]*c+a[11]*d+a[15]);this.x=(a[0]*b+a[4]*c+a[8]*d+a[12])*e;this.y=(a[1]*b+a[5]*c+a[9]*d+a[13])*e;this.z= +(a[2]*b+a[6]*c+a[10]*d+a[14])*e;return this},applyQuaternion:function(a){var b=this.x,c=this.y,d=this.z,e=a.x,f=a.y,g=a.z;a=a.w;var h=a*b+f*d-g*c,k=a*c+g*b-e*d,n=a*d+e*c-f*b,b=-e*b-f*c-g*d;this.x=h*a+b*-e+k*-g-n*-f;this.y=k*a+b*-f+n*-e-h*-g;this.z=n*a+b*-g+h*-f-k*-e;return this},project:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.projectionMatrix,a.getInverse(b.matrixWorld));return this.applyProjection(a)}}(),unproject:function(){var a;return function(b){void 0=== +a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.matrixWorld,a.getInverse(b.projectionMatrix));return this.applyProjection(a)}}(),transformDirection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d;this.y=a[1]*b+a[5]*c+a[9]*d;this.z=a[2]*b+a[6]*c+a[10]*d;this.normalize();return this},divide:function(a){this.x/=a.x;this.y/=a.y;this.z/=a.z;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a):this.z=this.y=this.x=0;return this},min:function(a){this.x> +a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3,b=new THREE.Vector3);a.set(c,c,c);b.set(d,d,d);return this.clamp(a, +b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)},normalize:function(){return 
this.divideScalar(this.length())},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/ +b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;return this},cross:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead."),this.crossVectors(a,b);var c=this.x,d=this.y,e=this.z;this.x=d*a.z-e*a.y;this.y=e*a.x-c*a.z;this.z=c*a.y-d*a.x;return this},crossVectors:function(a,b){var c=a.x,d=a.y,e=a.z,f=b.x,g=b.y,h=b.z;this.x=d*h-e*g;this.y=e*f-c*h;this.z=c*g-d*f;return this}, +projectOnVector:function(){var a,b;return function(c){void 0===a&&(a=new THREE.Vector3);a.copy(c).normalize();b=this.dot(a);return this.copy(a).multiplyScalar(b)}}(),projectOnPlane:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);a.copy(this).projectOnVector(b);return this.sub(a)}}(),reflect:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);return this.sub(a.copy(b).multiplyScalar(2*this.dot(b)))}}(),angleTo:function(a){a=this.dot(a)/(this.length()*a.length()); +return Math.acos(THREE.Math.clamp(a,-1,1))},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b=this.x-a.x,c=this.y-a.y;a=this.z-a.z;return b*b+c*c+a*a},setEulerFromRotationMatrix:function(a,b){console.error("THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotationMatrix() instead.")},setEulerFromQuaternion:function(a,b){console.error("THREE.Vector3: .setEulerFromQuaternion() has been removed. Use Euler.setFromQuaternion() instead.")}, +getPositionFromMatrix:function(a){console.warn("THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().");return this.setFromMatrixPosition(a)},getScaleFromMatrix:function(a){console.warn("THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().");return this.setFromMatrixScale(a)},getColumnFromMatrix:function(a,b){console.warn("THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().");return this.setFromMatrixColumn(a, +b)},setFromMatrixPosition:function(a){this.x=a.elements[12];this.y=a.elements[13];this.z=a.elements[14];return this},setFromMatrixScale:function(a){var b=this.set(a.elements[0],a.elements[1],a.elements[2]).length(),c=this.set(a.elements[4],a.elements[5],a.elements[6]).length();a=this.set(a.elements[8],a.elements[9],a.elements[10]).length();this.x=b;this.y=c;this.z=a;return this},setFromMatrixColumn:function(a,b){var c=4*a,d=b.elements;this.x=d[c];this.y=d[c+1];this.z=d[c+2];return this},equals:function(a){return a.x=== +this.x&&a.y===this.y&&a.z===this.z},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;return a},clone:function(){return new THREE.Vector3(this.x,this.y,this.z)}};THREE.Vector4=function(a,b,c,d){this.x=a||0;this.y=b||0;this.z=c||0;this.w=void 0!==d?d:1}; +THREE.Vector4.prototype={constructor:THREE.Vector4,set:function(a,b,c,d){this.x=a;this.y=b;this.z=c;this.w=d;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setW:function(a){this.w=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;case 3:this.w=b;break;default:throw Error("index is out of range: 
"+a);}},getComponent:function(a){switch(a){case 0:return this.x; +case 1:return this.y;case 2:return this.z;case 3:return this.w;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;this.w=void 0!==a.w?a.w:1;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;this.w+=a.w;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;this.w+=a;return this}, +addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;this.w=a.w+b.w;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;this.w-=a.w;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;this.w=a.w-b.w;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;this.w*=a;return this},applyMatrix4:function(a){var b= +this.x,c=this.y,d=this.z,e=this.w;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12]*e;this.y=a[1]*b+a[5]*c+a[9]*d+a[13]*e;this.z=a[2]*b+a[6]*c+a[10]*d+a[14]*e;this.w=a[3]*b+a[7]*c+a[11]*d+a[15]*e;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a,this.w*=a):(this.z=this.y=this.x=0,this.w=1);return this},setAxisAngleFromQuaternion:function(a){this.w=2*Math.acos(a.w);var b=Math.sqrt(1-a.w*a.w);1E-4>b?(this.x=1,this.z=this.y=0):(this.x=a.x/b,this.y=a.y/b,this.z=a.z/b);return this}, +setAxisAngleFromRotationMatrix:function(a){var b,c,d;a=a.elements;var e=a[0];d=a[4];var f=a[8],g=a[1],h=a[5],k=a[9];c=a[2];b=a[6];var n=a[10];if(.01>Math.abs(d-g)&&.01>Math.abs(f-c)&&.01>Math.abs(k-b)){if(.1>Math.abs(d+g)&&.1>Math.abs(f+c)&&.1>Math.abs(k+b)&&.1>Math.abs(e+h+n-3))return this.set(1,0,0,0),this;a=Math.PI;e=(e+1)/2;h=(h+1)/2;n=(n+1)/2;d=(d+g)/4;f=(f+c)/4;k=(k+b)/4;e>h&&e>n?.01>e?(b=0,d=c=.707106781):(b=Math.sqrt(e),c=d/b,d=f/b):h>n?.01>h?(b=.707106781,c=0,d=.707106781):(c=Math.sqrt(h), +b=d/c,d=k/c):.01>n?(c=b=.707106781,d=0):(d=Math.sqrt(n),b=f/d,c=k/d);this.set(b,c,d,a);return this}a=Math.sqrt((b-k)*(b-k)+(f-c)*(f-c)+(g-d)*(g-d));.001>Math.abs(a)&&(a=1);this.x=(b-k)/a;this.y=(f-c)/a;this.z=(g-d)/a;this.w=Math.acos((e+h+n-1)/2);return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);this.w>a.w&&(this.w=a.w);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);this.wb.w&&(this.w=b.w);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector4,b=new THREE.Vector4);a.set(c,c,c,c);b.set(d,d,d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);this.w=Math.floor(this.w); +return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);this.w=Math.ceil(this.w);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);this.w=Math.round(this.w);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);this.w=0>this.w?Math.ceil(this.w):Math.floor(this.w); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;this.w=-this.w;return 
this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z+this.w*a.w},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)+Math.abs(this.w)},normalize:function(){return this.divideScalar(this.length())}, +setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;this.w+=(a.w-this.w)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z&&a.w===this.w},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];this.w=a[b+3];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]= +this.z;a[b+3]=this.w;return a},clone:function(){return new THREE.Vector4(this.x,this.y,this.z,this.w)}};THREE.Euler=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._order=d||THREE.Euler.DefaultOrder};THREE.Euler.RotationOrders="XYZ YZX ZXY XZY YXZ ZYX".split(" ");THREE.Euler.DefaultOrder="XYZ"; +THREE.Euler.prototype={constructor:THREE.Euler,_x:0,_y:0,_z:0,_order:THREE.Euler.DefaultOrder,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get order(){return this._order},set order(a){this._order=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._order=d||this._order;this.onChangeCallback();return this},copy:function(a){this._x= +a._x;this._y=a._y;this._z=a._z;this._order=a._order;this.onChangeCallback();return this},setFromRotationMatrix:function(a,b){var c=THREE.Math.clamp,d=a.elements,e=d[0],f=d[4],g=d[8],h=d[1],k=d[5],n=d[9],p=d[2],q=d[6],d=d[10];b=b||this._order;"XYZ"===b?(this._y=Math.asin(c(g,-1,1)),.99999>Math.abs(g)?(this._x=Math.atan2(-n,d),this._z=Math.atan2(-f,e)):(this._x=Math.atan2(q,k),this._z=0)):"YXZ"===b?(this._x=Math.asin(-c(n,-1,1)),.99999>Math.abs(n)?(this._y=Math.atan2(g,d),this._z=Math.atan2(h,k)):(this._y= +Math.atan2(-p,e),this._z=0)):"ZXY"===b?(this._x=Math.asin(c(q,-1,1)),.99999>Math.abs(q)?(this._y=Math.atan2(-p,d),this._z=Math.atan2(-f,k)):(this._y=0,this._z=Math.atan2(h,e))):"ZYX"===b?(this._y=Math.asin(-c(p,-1,1)),.99999>Math.abs(p)?(this._x=Math.atan2(q,d),this._z=Math.atan2(h,e)):(this._x=0,this._z=Math.atan2(-f,k))):"YZX"===b?(this._z=Math.asin(c(h,-1,1)),.99999>Math.abs(h)?(this._x=Math.atan2(-n,k),this._y=Math.atan2(-p,e)):(this._x=0,this._y=Math.atan2(g,d))):"XZY"===b?(this._z=Math.asin(-c(f, +-1,1)),.99999>Math.abs(f)?(this._x=Math.atan2(q,k),this._y=Math.atan2(g,e)):(this._x=Math.atan2(-n,d),this._y=0)):console.warn("THREE.Euler: .setFromRotationMatrix() given unsupported order: "+b);this._order=b;this.onChangeCallback();return this},setFromQuaternion:function(a,b,c){var d=THREE.Math.clamp,e=a.x*a.x,f=a.y*a.y,g=a.z*a.z,h=a.w*a.w;b=b||this._order;"XYZ"===b?(this._x=Math.atan2(2*(a.x*a.w-a.y*a.z),h-e-f+g),this._y=Math.asin(d(2*(a.x*a.z+a.y*a.w),-1,1)),this._z=Math.atan2(2*(a.z*a.w-a.x* 
+a.y),h+e-f-g)):"YXZ"===b?(this._x=Math.asin(d(2*(a.x*a.w-a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h-e-f+g),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h-e+f-g)):"ZXY"===b?(this._x=Math.asin(d(2*(a.x*a.w+a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.y*a.w-a.z*a.x),h-e-f+g),this._z=Math.atan2(2*(a.z*a.w-a.x*a.y),h-e+f-g)):"ZYX"===b?(this._x=Math.atan2(2*(a.x*a.w+a.z*a.y),h-e-f+g),this._y=Math.asin(d(2*(a.y*a.w-a.x*a.z),-1,1)),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h+e-f-g)):"YZX"===b?(this._x=Math.atan2(2* +(a.x*a.w-a.z*a.y),h-e+f-g),this._y=Math.atan2(2*(a.y*a.w-a.x*a.z),h+e-f-g),this._z=Math.asin(d(2*(a.x*a.y+a.z*a.w),-1,1))):"XZY"===b?(this._x=Math.atan2(2*(a.x*a.w+a.y*a.z),h-e+f-g),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h+e-f-g),this._z=Math.asin(d(2*(a.z*a.w-a.x*a.y),-1,1))):console.warn("THREE.Euler: .setFromQuaternion() given unsupported order: "+b);this._order=b;if(!1!==c)this.onChangeCallback();return this},reorder:function(){var a=new THREE.Quaternion;return function(b){a.setFromEuler(this); +this.setFromQuaternion(a,b)}}(),equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._order===this._order},fromArray:function(a){this._x=a[0];this._y=a[1];this._z=a[2];void 0!==a[3]&&(this._order=a[3]);this.onChangeCallback();return this},toArray:function(){return[this._x,this._y,this._z,this._order]},onChange:function(a){this.onChangeCallback=a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Euler(this._x,this._y,this._z,this._order)}}; +THREE.Line3=function(a,b){this.start=void 0!==a?a:new THREE.Vector3;this.end=void 0!==b?b:new THREE.Vector3}; +THREE.Line3.prototype={constructor:THREE.Line3,set:function(a,b){this.start.copy(a);this.end.copy(b);return this},copy:function(a){this.start.copy(a.start);this.end.copy(a.end);return this},center:function(a){return(a||new THREE.Vector3).addVectors(this.start,this.end).multiplyScalar(.5)},delta:function(a){return(a||new THREE.Vector3).subVectors(this.end,this.start)},distanceSq:function(){return this.start.distanceToSquared(this.end)},distance:function(){return this.start.distanceTo(this.end)},at:function(a, +b){var c=b||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},closestPointToPointParameter:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d){a.subVectors(c,this.start);b.subVectors(this.end,this.start);var e=b.dot(b),e=b.dot(a)/e;d&&(e=THREE.Math.clamp(e,0,1));return e}}(),closestPointToPoint:function(a,b,c){a=this.closestPointToPointParameter(a,b);c=c||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},applyMatrix4:function(a){this.start.applyMatrix4(a); +this.end.applyMatrix4(a);return this},equals:function(a){return a.start.equals(this.start)&&a.end.equals(this.end)},clone:function(){return(new THREE.Line3).copy(this)}};THREE.Box2=function(a,b){this.min=void 0!==a?a:new THREE.Vector2(Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector2(-Infinity,-Infinity)}; +THREE.Box2.prototype={constructor:THREE.Box2,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector2).set((a.x-this.min.x)/(this.max.x-this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.y 
+this.max.y?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector2).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector2;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},translate:function(a){this.min.add(a);this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&& +a.max.equals(this.max)},clone:function(){return(new THREE.Box2).copy(this)}};THREE.Box3=function(a,b){this.min=void 0!==a?a:new THREE.Vector3(Infinity,Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector3(-Infinity,-Infinity,-Infinity)}; +THREE.Box3.prototype={constructor:THREE.Box3,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;b<c;b++)this.expandByPoint(a[b]);return this},setFromCenterAndSize:function(){var a=new THREE.Vector3;return function(b,c){var d=a.copy(c).multiplyScalar(.5);this.min.copy(b).sub(d);this.max.copy(b).add(d);return this}}(),setFromObject:function(){var a=new THREE.Vector3;return function(b){var c=this;b.updateMatrixWorld(!0);this.makeEmpty();b.traverse(function(b){var d=b.geometry;if(void 0!==d)if(d instanceof THREE.Geometry)for(var e=d.vertices,f=0,g=e.length;f<g;f++)a.copy(e[f]),a.applyMatrix4(b.matrixWorld),c.expandByPoint(a);else if(d instanceof THREE.BufferGeometry&&void 0!==d.attributes.position)for(e=d.attributes.position.array,f=0,g=e.length;f<g;f+=3)a.set(e[f],e[f+1],e[f+2]),a.applyMatrix4(b.matrixWorld),c.expandByPoint(a)});return this}}(),copy:function(a){this.min.copy(a.min);this.max.copy(a.max);return this},makeEmpty:function(){this.min.x=this.min.y=this.min.z=Infinity;this.max.x=this.max.y=this.max.z=-Infinity;return this},empty:function(){return this.max.x<this.min.x||this.max.y<this.min.y||this.max.z<this.min.z},center:function(a){return(a||new THREE.Vector3).addVectors(this.min,this.max).multiplyScalar(.5)},size:function(a){return(a||new THREE.Vector3).subVectors(this.max,this.min)},expandByPoint:function(a){this.min.min(a);this.max.max(a);return this},expandByVector:function(a){this.min.sub(a);this.max.add(a);return this},expandByScalar:function(a){this.min.addScalar(-a);this.max.addScalar(a);return this},containsPoint:function(a){return a.x<this.min.x||a.x>this.max.x||a.y<this.min.y||a.y>this.max.y||a.z<this.min.z||a.z>this.max.z?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y&&this.min.z<=a.min.z&&a.max.z<=this.max.z?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector3).set((a.x-this.min.x)/(this.max.x- +this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y),(a.z-this.min.z)/(this.max.z-this.min.z))},isIntersectionBox:function(a){return a.max.x<this.min.x||a.min.x>this.max.x||a.max.y<this.min.y||a.min.y>this.max.y||a.max.z<this.min.z||a.min.z>this.max.z?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector3).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),getBoundingSphere:function(){var a= +new THREE.Vector3;return function(b){b=b||new THREE.Sphere;b.center=this.center();b.radius=.5*this.size(a).length();return b}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},applyMatrix4:function(){var a=[new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3];return function(b){a[0].set(this.min.x,this.min.y, +this.min.z).applyMatrix4(b);a[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(b);a[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(b);a[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(b);a[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(b);a[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(b);a[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(b);a[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(b);this.makeEmpty();this.setFromPoints(a);return this}}(),translate:function(a){this.min.add(a); +this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&&a.max.equals(this.max)},clone:function(){return(new THREE.Box3).copy(this)}};THREE.Matrix3=function(){this.elements=new Float32Array([1,0,0,0,1,0,0,0,1]);0this.determinant()&&(g=-g);c.x=f[12];c.y=f[13];c.z=f[14];b.elements.set(this.elements);c=1/g;var f=1/h,n=1/k;b.elements[0]*=c;b.elements[1]*= +c;b.elements[2]*=c;b.elements[4]*=f;b.elements[5]*=f;b.elements[6]*=f;b.elements[8]*=n;b.elements[9]*=n;b.elements[10]*=n;d.setFromRotationMatrix(b);e.x=g;e.y=h;e.z=k;return this}}(),makeFrustum:function(a,b,c,d,e,f){var g=this.elements;g[0]=2*e/(b-a);g[4]=0;g[8]=(b+a)/(b-a);g[12]=0;g[1]=0;g[5]=2*e/(d-c);g[9]=(d+c)/(d-c);g[13]=0;g[2]=0;g[6]=0;g[10]=-(f+e)/(f-e);g[14]=-2*f*e/(f-e);g[3]=0;g[7]=0;g[11]=-1;g[15]=0;return
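+/* Editor's note: the Matrix3 prototype and most of Matrix4 were lost to
+   extraction damage in the stretch above and are left as-is. makePerspective
+   (next line) is a thin wrapper over makeFrustum: it converts a vertical field
+   of view into frustum bounds, ymax = near * tan(fov/2), xmax = ymax * aspect,
+   then delegates. Sketch, with fov given in degrees:
+     var m = new THREE.Matrix4().makePerspective(45, 16 / 9, 0.1, 1000);
+   makeOrthographic instead fills the matrix directly from the six box planes. */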
this},makePerspective:function(a,b,c,d){a=c*Math.tan(THREE.Math.degToRad(.5*a)); +var e=-a;return this.makeFrustum(e*b,a*b,e,a,c,d)},makeOrthographic:function(a,b,c,d,e,f){var g=this.elements,h=b-a,k=c-d,n=f-e;g[0]=2/h;g[4]=0;g[8]=0;g[12]=-((b+a)/h);g[1]=0;g[5]=2/k;g[9]=0;g[13]=-((c+d)/k);g[2]=0;g[6]=0;g[10]=-2/n;g[14]=-((f+e)/n);g[3]=0;g[7]=0;g[11]=0;g[15]=1;return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements;return[a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15]]},clone:function(){return(new THREE.Matrix4).fromArray(this.elements)}}; +THREE.Ray=function(a,b){this.origin=void 0!==a?a:new THREE.Vector3;this.direction=void 0!==b?b:new THREE.Vector3}; +THREE.Ray.prototype={constructor:THREE.Ray,set:function(a,b){this.origin.copy(a);this.direction.copy(b);return this},copy:function(a){this.origin.copy(a.origin);this.direction.copy(a.direction);return this},at:function(a,b){return(b||new THREE.Vector3).copy(this.direction).multiplyScalar(a).add(this.origin)},recast:function(){var a=new THREE.Vector3;return function(b){this.origin.copy(this.at(b,a));return this}}(),closestPointToPoint:function(a,b){var c=b||new THREE.Vector3;c.subVectors(a,this.origin); +var d=c.dot(this.direction);return 0>d?c.copy(this.origin):c.copy(this.direction).multiplyScalar(d).add(this.origin)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){var c=a.subVectors(b,this.origin).dot(this.direction);if(0>c)return this.origin.distanceTo(b);a.copy(this.direction).multiplyScalar(c).add(this.origin);return a.distanceTo(b)}}(),distanceSqToSegment:function(a,b,c,d){var e=a.clone().add(b).multiplyScalar(.5),f=b.clone().sub(a).normalize(),g=.5*a.distanceTo(b),h= +this.origin.clone().sub(e);a=-this.direction.dot(f);b=h.dot(this.direction);var k=-h.dot(f),n=h.lengthSq(),p=Math.abs(1-a*a),q,m;0<=p?(h=a*k-b,q=a*b-k,m=g*p,0<=h?q>=-m?q<=m?(g=1/p,h*=g,q*=g,a=h*(h+a*q+2*b)+q*(a*h+q+2*k)+n):(q=g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):(q=-g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):q<=-m?(h=Math.max(0,-(-a*g+b)),q=0<h?-g:Math.min(Math.max(-g,-k),g),a=-h*h+q*(q+2*k)+n):q<=m?(h=0,q=Math.min(Math.max(-g,-k),g),a=q*(q+2*k)+n):(h=Math.max(0,-(a*g+b)),q=0<h?g:Math.min(Math.max(-g,-k),g),a=-h*h+q*(q+2*k)+n):(q=0<a?-g:g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n);c&&c.copy(this.direction.clone().multiplyScalar(h).add(this.origin));d&&d.copy(f.clone().multiplyScalar(q).add(e));return a},isIntersectionSphere:function(a){return this.distanceToPoint(a.center)<=a.radius},intersectSphere:function(){var a=new THREE.Vector3;return function(b,c){a.subVectors(b.center,this.origin);var d=a.dot(this.direction),e=a.dot(a)-d*d,f=b.radius*b.radius;if(e>f)return null;f=Math.sqrt(f-e);e=d-f; +d+=f;return 0>e&&0>d?null:0>e?this.at(d,c):this.at(e,c)}}(),isIntersectionPlane:function(a){var b=a.distanceToPoint(this.origin);return 0===b||0>a.normal.dot(this.direction)*b?!0:!1},distanceToPlane:function(a){var b=a.normal.dot(this.direction);if(0==b)return 0==a.distanceToPoint(this.origin)?0:null;a=-(this.origin.dot(a.normal)+a.constant)/b;return 0<=a?a:null},intersectPlane:function(a,b){var c=this.distanceToPlane(a);return null===c?null:this.at(c,b)},isIntersectionBox:function(){var a=new THREE.Vector3; +return function(b){return null!==this.intersectBox(b,a)}}(),intersectBox:function(a,b){var c,d,e,f,g;d=1/this.direction.x;f=1/this.direction.y;g=1/this.direction.z;var h=this.origin;0<=d?(c=(a.min.x-h.x)*d,d*=a.max.x-h.x):(c=(a.max.x-h.x)*d,d*=a.min.x-h.x);0<=f?(e=(a.min.y-h.y)*f,f*=a.max.y-h.y):(e=(a.max.y-h.y)*f,f*=a.min.y-h.y);if(c>f||e>d)return null;if(e>c||c!==c)c=e;if(f<d||d!==d)d=f;0<=g?(e=(a.min.z-h.z)*g,g*=a.max.z-h.z):(e=(a.max.z-h.z)*g,g*=a.min.z-h.z);if(c>g||e>d)return null;if(e>c||c!== +c)c=e;if(g<d||d!==d)d=g;return 0>d?null:this.at(0<=c?c:d,b)},intersectTriangle:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Vector3;return function(e,f,g,h,k){b.subVectors(f,e);c.subVectors(g,e);d.crossVectors(b,c);f=this.direction.dot(d);if(0<f){if(h)return null;h=1}else if(0>f)h=-1,f=-f;else return null;a.subVectors(this.origin,e);e=h*this.direction.dot(c.crossVectors(a,c));if(0>e)return null;g=h*this.direction.dot(b.cross(a));if(0>g||e+g>f)return null; +e=-h*a.dot(d);return
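+/* Editor's note: intersectBox above is the classic "slab" test: per axis the
+   ray enters at a near t and leaves at a far t, the three intervals are
+   intersected, and the box is hit iff the running interval stays non-empty.
+   The c!==c / d!==d checks catch NaN from 0/0 when the ray is parallel to a
+   slab and the origin lies on its boundary. Sketch:
+     var hit = new THREE.Ray(new THREE.Vector3(0, 0, -5), new THREE.Vector3(0, 0, 1))
+       .intersectBox(new THREE.Box3(new THREE.Vector3(-1, -1, -1), new THREE.Vector3(1, 1, 1)),
+                     new THREE.Vector3()); // expected hit point: (0, 0, -1)
+*/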
0>e?null:this.at(e/f,k)}}(),applyMatrix4:function(a){this.direction.add(this.origin).applyMatrix4(a);this.origin.applyMatrix4(a);this.direction.sub(this.origin);this.direction.normalize();return this},equals:function(a){return a.origin.equals(this.origin)&&a.direction.equals(this.direction)},clone:function(){return(new THREE.Ray).copy(this)}};THREE.Sphere=function(a,b){this.center=void 0!==a?a:new THREE.Vector3;this.radius=void 0!==b?b:0}; +THREE.Sphere.prototype={constructor:THREE.Sphere,set:function(a,b){this.center.copy(a);this.radius=b;return this},setFromPoints:function(){var a=new THREE.Box3;return function(b,c){var d=this.center;void 0!==c?d.copy(c):a.setFromPoints(b).center(d);for(var e=0,f=0,g=b.length;f<g;f++)e=Math.max(e,d.distanceToSquared(b[f]));this.radius=Math.sqrt(e);return this}}(),copy:function(a){this.center.copy(a.center);this.radius=a.radius;return this},empty:function(){return 0>=this.radius},containsPoint:function(a){return a.distanceToSquared(this.center)<= +this.radius*this.radius},distanceToPoint:function(a){return a.distanceTo(this.center)-this.radius},intersectsSphere:function(a){var b=this.radius+a.radius;return a.center.distanceToSquared(this.center)<=b*b},clampPoint:function(a,b){var c=this.center.distanceToSquared(a),d=b||new THREE.Vector3;d.copy(a);c>this.radius*this.radius&&(d.sub(this.center).normalize(),d.multiplyScalar(this.radius).add(this.center));return d},getBoundingBox:function(a){a=a||new THREE.Box3;a.set(this.center,this.center);a.expandByScalar(this.radius); +return a},applyMatrix4:function(a){this.center.applyMatrix4(a);this.radius*=a.getMaxScaleOnAxis();return this},translate:function(a){this.center.add(a);return this},equals:function(a){return a.center.equals(this.center)&&a.radius===this.radius},clone:function(){return(new THREE.Sphere).copy(this)}}; +THREE.Frustum=function(a,b,c,d,e,f){this.planes=[void 0!==a?a:new THREE.Plane,void 0!==b?b:new THREE.Plane,void 0!==c?c:new THREE.Plane,void 0!==d?d:new THREE.Plane,void 0!==e?e:new THREE.Plane,void 0!==f?f:new THREE.Plane]}; +THREE.Frustum.prototype={constructor:THREE.Frustum,set:function(a,b,c,d,e,f){var g=this.planes;g[0].copy(a);g[1].copy(b);g[2].copy(c);g[3].copy(d);g[4].copy(e);g[5].copy(f);return this},copy:function(a){for(var b=this.planes,c=0;6>c;c++)b[c].copy(a.planes[c]);return this},setFromMatrix:function(a){var b=this.planes,c=a.elements;a=c[0];var d=c[1],e=c[2],f=c[3],g=c[4],h=c[5],k=c[6],n=c[7],p=c[8],q=c[9],m=c[10],r=c[11],t=c[12],s=c[13],u=c[14],c=c[15];b[0].setComponents(f-a,n-g,r-p,c-t).normalize();b[1].setComponents(f+ +a,n+g,r+p,c+t).normalize();b[2].setComponents(f+d,n+h,r+q,c+s).normalize();b[3].setComponents(f-d,n-h,r-q,c-s).normalize();b[4].setComponents(f-e,n-k,r-m,c-u).normalize();b[5].setComponents(f+e,n+k,r+m,c+u).normalize();return this},intersectsObject:function(){var a=new THREE.Sphere;return function(b){var c=b.geometry;null===c.boundingSphere&&c.computeBoundingSphere();a.copy(c.boundingSphere);a.applyMatrix4(b.matrixWorld);return this.intersectsSphere(a)}}(),intersectsSphere:function(a){var b=this.planes, +c=a.center;a=-a.radius;for(var d=0;6>d;d++)if(b[d].distanceToPoint(c)<a)return!1;return!0},intersectsBox:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c){for(var d=this.planes,e=0;6>e;e++){var f=d[e];a.x=0<f.normal.x?c.min.x:c.max.x;b.x=0<f.normal.x?c.max.x:c.min.x;a.y=0<f.normal.y?c.min.y:c.max.y;b.y=0<f.normal.y?c.max.y:c.min.y;a.z=0<f.normal.z?c.min.z:c.max.z;b.z=0<f.normal.z?c.max.z:c.min.z;var g=f.distanceToPoint(a);f=f.distanceToPoint(b);if(0>g&&0>f)return!1}return!0}}(), +containsPoint:function(a){for(var b=this.planes,c=0;6>c;c++)if(0>b[c].distanceToPoint(a))return!1;return!0},clone:function(){return(new THREE.Frustum).copy(this)}};THREE.Plane=function(a,b){this.normal=void 0!==a?a:new THREE.Vector3(1,0,0);this.constant=void 0!==b?b:0}; +THREE.Plane.prototype={constructor:THREE.Plane,set:function(a,b){this.normal.copy(a);this.constant=b;return this},setComponents:function(a,b,c,d){this.normal.set(a,b,c);this.constant=d;return
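+/* Editor's note: Frustum.setFromMatrix above implements the Gribb/Hartmann
+   extraction: each clip plane is a sum or difference of rows of the combined
+   projection * view matrix (row 4 +/- rows 1, 2, 3), normalized afterwards.
+   Typical render-loop usage, sketched with `camera` and `mesh` assumed to come
+   from application code:
+     var f = new THREE.Frustum().setFromMatrix(new THREE.Matrix4()
+       .multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse));
+     if (f.intersectsObject(mesh)) { /- draw it -/ }
+*/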
this},setFromNormalAndCoplanarPoint:function(a,b){this.normal.copy(a);this.constant=-b.dot(this.normal);return this},setFromCoplanarPoints:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d,e){d=a.subVectors(e,d).cross(b.subVectors(c,d)).normalize();this.setFromNormalAndCoplanarPoint(d, +c);return this}}(),copy:function(a){this.normal.copy(a.normal);this.constant=a.constant;return this},normalize:function(){var a=1/this.normal.length();this.normal.multiplyScalar(a);this.constant*=a;return this},negate:function(){this.constant*=-1;this.normal.negate();return this},distanceToPoint:function(a){return this.normal.dot(a)+this.constant},distanceToSphere:function(a){return this.distanceToPoint(a.center)-a.radius},projectPoint:function(a,b){return this.orthoPoint(a,b).sub(a).negate()},orthoPoint:function(a, +b){var c=this.distanceToPoint(a);return(b||new THREE.Vector3).copy(this.normal).multiplyScalar(c)},isIntersectionLine:function(a){var b=this.distanceToPoint(a.start);a=this.distanceToPoint(a.end);return 0>b&&0<a||0>a&&0<b},intersectLine:function(){var a=new THREE.Vector3;return function(b,c){var d=c||new THREE.Vector3,e=b.delta(a),f=this.normal.dot(e);if(0===f){if(0===this.distanceToPoint(b.start))return d.copy(b.start)}else return f=-(b.start.dot(this.normal)+this.constant)/f,0>f||1<f?void 0:d.copy(e).multiplyScalar(f).add(b.start)}}(),coplanarPoint:function(a){return(a||new THREE.Vector3).copy(this.normal).multiplyScalar(-this.constant)},applyMatrix4:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Matrix3;return function(d,e){var f=e||c.getNormalMatrix(d),f=a.copy(this.normal).applyMatrix3(f).normalize(),g=this.coplanarPoint(b);g.applyMatrix4(d);this.setFromNormalAndCoplanarPoint(f,g);return this}}(),translate:function(a){this.constant-=a.dot(this.normal);return this},equals:function(a){return a.normal.equals(this.normal)&&a.constant===this.constant},clone:function(){return(new THREE.Plane).copy(this)}};THREE.Math={generateUUID:function(){var a="0123456789abcdef".split(""),b=Array(36),c=0,d;return function(){for(var e=0;36>e;e++)8==e||13==e||18==e||23==e?b[e]="-":14==e?b[e]="4":(2>=c&&(c=33554432+16777216*Math.random()|0),d=c&15,c>>=4,b[e]=a[19==e?d&3|8:d]);return b.join("")}}(),clamp:function(a,b,c){return a<b?b:a>c?c:a},clampBottom:function(a,b){return a<b?b:a},mapLinear:function(a,b,c,d,e){return d+(a-b)*(e-d)/(c-b)},smoothstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*(3-2*a)},smootherstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*a*(a*(6*a-15)+10)},random16:function(){return(65280*Math.random()+255*Math.random())/65535},randInt:function(a,b){return a+Math.floor(Math.random()*(b-a+1))},randFloat:function(a,b){return a+Math.random()*(b-a)},randFloatSpread:function(a){return a*(.5-Math.random())},degToRad:function(){var a=Math.PI/180;return function(b){return b*a}}(),radToDeg:function(){var a= +180/Math.PI;return function(b){return b*a}}(),isPowerOfTwo:function(a){return 0===(a&a-1)&&0!==a}}; +THREE.Spline=function(a){function b(a,b,c,d,e,f,g){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*g+(-3*(b-c)-2*a-d)*f+a*e+b}this.points=a;var c=[],d={x:0,y:0,z:0},e,f,g,h,k,n,p,q,m;this.initFromArray=function(a){this.points=[];for(var b=0;b<a.length;b++)this.points[b]={x:a[b][0],y:a[b][1],z:a[b][2]}};this.getPoint=function(a){e=(this.points.length-1)*a;f=Math.floor(e);g=e-f;c[0]=0===f?f:f-1;c[1]=f;c[2]=f>this.points.length-2?this.points.length-1:f+1;c[3]=f>this.points.length-3?this.points.length-1:f+ 2;n=this.points[c[0]];p=this.points[c[1]];q=this.points[c[2]];m=this.points[c[3]];h=g*g;k=g*h;d.x=b(n.x,p.x,q.x,m.x,g,h,k);d.y=b(n.y,p.y,q.y,m.y,g,h,k);d.z=b(n.z,p.z,q.z,m.z,g,h,k);return d};this.getControlPointsArray=function(){var a,b,c=this.points.length,d=[];for(a=0;a=b.x+b.y}}(); +THREE.Triangle.prototype={constructor:THREE.Triangle,set:function(a,b,c){this.a.copy(a);this.b.copy(b);this.c.copy(c);return this},setFromPointsAndIndices:function(a,b,c,d){this.a.copy(a[b]);this.b.copy(a[c]);this.c.copy(a[d]);return this},copy:function(a){this.a.copy(a.a);this.b.copy(a.b);this.c.copy(a.c);return this},area:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){a.subVectors(this.c,this.b);b.subVectors(this.a,this.b);return.5*a.cross(b).length()}}(),midpoint:function(a){return(a|| +new THREE.Vector3).addVectors(this.a,this.b).add(this.c).multiplyScalar(1/3)},normal:function(a){return THREE.Triangle.normal(this.a,this.b,this.c,a)},plane:function(a){return(a||new THREE.Plane).setFromCoplanarPoints(this.a,this.b,this.c)},barycoordFromPoint:function(a,b){return THREE.Triangle.barycoordFromPoint(a,this.a,this.b,this.c,b)},containsPoint:function(a){return THREE.Triangle.containsPoint(a,this.a,this.b,this.c)},equals:function(a){return a.a.equals(this.a)&&a.b.equals(this.b)&&a.c.equals(this.c)},
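+/* Editor's note: the tail of THREE.Spline and the THREE.Triangle constructor
+   and statics were lost to extraction damage above and are left as a gap.
+   THREE.Math.smoothstep (restored above) is the standard cubic easing
+   3t^2 - 2t^3 after clamping t = (x - min) / (max - min) to [0, 1], so the
+   curve leaves both ends with zero slope; smootherstep is Perlin's quintic
+   variant with zero second derivatives as well. For example,
+   THREE.Math.smoothstep(5, 0, 10) === 0.5, while inputs near 0 or 10 ease
+   gently in and out. */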
+clone:function(){return(new THREE.Triangle).copy(this)}};THREE.Clock=function(a){this.autoStart=void 0!==a?a:!0;this.elapsedTime=this.oldTime=this.startTime=0;this.running=!1}; +THREE.Clock.prototype={constructor:THREE.Clock,start:function(){this.oldTime=this.startTime=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now();this.running=!0},stop:function(){this.getElapsedTime();this.running=!1},getElapsedTime:function(){this.getDelta();return this.elapsedTime},getDelta:function(){var a=0;this.autoStart&&!this.running&&this.start();if(this.running){var b=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now(), +a=.001*(b-this.oldTime);this.oldTime=b;this.elapsedTime+=a}return a}};THREE.EventDispatcher=function(){}; +THREE.EventDispatcher.prototype={constructor:THREE.EventDispatcher,apply:function(a){a.addEventListener=THREE.EventDispatcher.prototype.addEventListener;a.hasEventListener=THREE.EventDispatcher.prototype.hasEventListener;a.removeEventListener=THREE.EventDispatcher.prototype.removeEventListener;a.dispatchEvent=THREE.EventDispatcher.prototype.dispatchEvent},addEventListener:function(a,b){void 0===this._listeners&&(this._listeners={});var c=this._listeners;void 0===c[a]&&(c[a]=[]);-1===c[a].indexOf(b)&& +c[a].push(b)},hasEventListener:function(a,b){if(void 0===this._listeners)return!1;var c=this._listeners;return void 0!==c[a]&&-1!==c[a].indexOf(b)?!0:!1},removeEventListener:function(a,b){if(void 0!==this._listeners){var c=this._listeners[a];if(void 0!==c){var d=c.indexOf(b);-1!==d&&c.splice(d,1)}}},dispatchEvent:function(a){if(void 0!==this._listeners){var b=this._listeners[a.type];if(void 0!==b){a.target=this;for(var c=[],d=b.length,e=0;eza?-1:1;h[4*a]=la.x;h[4*a+1]=la.y;h[4*a+2]=la.z;h[4*a+3]=Ga}if(void 0===this.attributes.index||void 0===this.attributes.position||void 0===this.attributes.normal||void 0===this.attributes.uv)console.warn("Missing required attributes (index, position, normal or uv) in BufferGeometry.computeTangents()");else{var c=this.attributes.index.array,d=this.attributes.position.array, +e=this.attributes.normal.array,f=this.attributes.uv.array,g=d.length/3;void 0===this.attributes.tangent&&this.addAttribute("tangent",new THREE.BufferAttribute(new Float32Array(4*g),4));for(var h=this.attributes.tangent.array,k=[],n=[],p=0;ps;s++)t=a[3*c+s],-1==m[t]?(q[2*s]=t,q[2*s+1]=-1,p++):m[t]k.index+b)for(k={start:f,count:0,index:g},h.push(k),p=0;6>p;p+=2)s=q[p+1],-1p;p+=2)t=q[p],s=q[p+1],-1===s&&(s=g++),m[t]=s,r[s]=t,e[f++]=s-k.index,k.count++}this.reorderBuffers(e,r,g);return this.offsets=h},merge:function(){console.log("BufferGeometry.merge(): TODO")},normalizeNormals:function(){for(var a=this.attributes.normal.array,b,c,d,e=0,f=a.length;ed?-1:1,e.vertexTangents[c]=new THREE.Vector4(w.x,w.y,w.z,d);this.hasTangents=!0},computeLineDistances:function(){for(var a=0,b=this.vertices,c=0,d=b.length;cd;d++)if(e[d]==e[(d+1)%3]){a.push(f);break}for(f=a.length-1;0<=f;f--)for(e=a[f],this.faces.splice(e,1),c=0,g=this.faceVertexUvs.length;ca.opacity)h.transparent=a.transparent;void 0!==a.depthTest&&(h.depthTest=a.depthTest);void 0!==a.depthWrite&&(h.depthWrite=a.depthWrite);void 0!==a.visible&&(h.visible=a.visible);void 0!==a.flipSided&&(h.side=THREE.BackSide);void 0!==a.doubleSided&&(h.side=THREE.DoubleSide);void 0!==a.wireframe&&(h.wireframe=a.wireframe);void 0!==a.vertexColors&&("face"=== 
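+/* Editor's note: THREE.EventDispatcher above is a plain observer pattern, and
+   its .apply() mixes the four methods into any target object. Minimal sketch:
+     var obj = {};
+     THREE.EventDispatcher.prototype.apply(obj);
+     obj.addEventListener("dispose", function (e) { console.log(e.type, e.target); });
+     obj.dispatchEvent({ type: "dispose" }); // listeners receive e.target === obj
+   Material.dispose() further down uses exactly this hook to announce GPU
+   resource cleanup to the renderer. */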
+a.vertexColors?h.vertexColors=THREE.FaceColors:a.vertexColors&&(h.vertexColors=THREE.VertexColors));a.colorDiffuse?h.color=e(a.colorDiffuse):a.DbgColor&&(h.color=a.DbgColor);a.colorSpecular&&(h.specular=e(a.colorSpecular));a.colorAmbient&&(h.ambient=e(a.colorAmbient));a.colorEmissive&&(h.emissive=e(a.colorEmissive));a.transparency&&(h.opacity=a.transparency);a.specularCoef&&(h.shininess=a.specularCoef);a.mapDiffuse&&b&&d(h,"map",a.mapDiffuse,a.mapDiffuseRepeat,a.mapDiffuseOffset,a.mapDiffuseWrap, +a.mapDiffuseAnisotropy);a.mapLight&&b&&d(h,"lightMap",a.mapLight,a.mapLightRepeat,a.mapLightOffset,a.mapLightWrap,a.mapLightAnisotropy);a.mapBump&&b&&d(h,"bumpMap",a.mapBump,a.mapBumpRepeat,a.mapBumpOffset,a.mapBumpWrap,a.mapBumpAnisotropy);a.mapNormal&&b&&d(h,"normalMap",a.mapNormal,a.mapNormalRepeat,a.mapNormalOffset,a.mapNormalWrap,a.mapNormalAnisotropy);a.mapSpecular&&b&&d(h,"specularMap",a.mapSpecular,a.mapSpecularRepeat,a.mapSpecularOffset,a.mapSpecularWrap,a.mapSpecularAnisotropy);a.mapAlpha&& +b&&d(h,"alphaMap",a.mapAlpha,a.mapAlphaRepeat,a.mapAlphaOffset,a.mapAlphaWrap,a.mapAlphaAnisotropy);a.mapBumpScale&&(h.bumpScale=a.mapBumpScale);a.mapNormal?(g=THREE.ShaderLib.normalmap,k=THREE.UniformsUtils.clone(g.uniforms),k.tNormal.value=h.normalMap,a.mapNormalFactor&&k.uNormalScale.value.set(a.mapNormalFactor,a.mapNormalFactor),h.map&&(k.tDiffuse.value=h.map,k.enableDiffuse.value=!0),h.specularMap&&(k.tSpecular.value=h.specularMap,k.enableSpecular.value=!0),h.lightMap&&(k.tAO.value=h.lightMap, +k.enableAO.value=!0),k.diffuse.value.setHex(h.color),k.specular.value.setHex(h.specular),k.ambient.value.setHex(h.ambient),k.shininess.value=h.shininess,void 0!==h.opacity&&(k.opacity.value=h.opacity),g=new THREE.ShaderMaterial({fragmentShader:g.fragmentShader,vertexShader:g.vertexShader,uniforms:k,lights:!0,fog:!0}),h.transparent&&(g.transparent=!0)):g=new THREE[g](h);void 0!==a.DbgName&&(g.name=a.DbgName);return g}}; +THREE.Loader.Handlers={handlers:[],add:function(a,b){this.handlers.push(a,b)},get:function(a){for(var b=0,c=this.handlers.length;bg;g++)m=y[k++],v=u[2*m],m=u[2*m+1],v=new THREE.Vector2(v,m),2!==g&&c.faceVertexUvs[d][h].push(v),0!==g&&c.faceVertexUvs[d][h+1].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]),s.normal.copy(r.normal));if(t)for(d=0;4>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++], +G[q++],G[q]),2!==d&&r.vertexNormals.push(t),0!==d&&s.vertexNormals.push(t);p&&(p=y[k++],p=w[p],r.color.setHex(p),s.color.setHex(p));if(b)for(d=0;4>d;d++)p=y[k++],p=w[p],2!==d&&r.vertexColors.push(new THREE.Color(p)),0!==d&&s.vertexColors.push(new THREE.Color(p));c.faces.push(r);c.faces.push(s)}else{r=new THREE.Face3;r.a=y[k++];r.b=y[k++];r.c=y[k++];h&&(h=y[k++],r.materialIndex=h);h=c.faces.length;if(d)for(d=0;dg;g++)m=y[k++],v=u[2*m],m=u[2*m+1], +v=new THREE.Vector2(v,m),c.faceVertexUvs[d][h].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]));if(t)for(d=0;3>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++],G[q++],G[q]),r.vertexNormals.push(t);p&&(p=y[k++],r.color.setHex(w[p]));if(b)for(d=0;3>d;d++)p=y[k++],r.vertexColors.push(new THREE.Color(w[p]));c.faces.push(r)}})(d);(function(){var b=void 0!==a.influencesPerVertex?a.influencesPerVertex:2;if(a.skinWeights)for(var d=0,g=a.skinWeights.length;dthis.opacity&&(a.opacity=this.opacity);!1!==this.transparent&&(a.transparent=this.transparent);!1!==this.wireframe&&(a.wireframe=this.wireframe);return a},clone:function(a){void 0===a&&(a=new 
THREE.Material);a.name=this.name;a.side=this.side;a.opacity=this.opacity;a.transparent=this.transparent;a.blending=this.blending;a.blendSrc=this.blendSrc;a.blendDst=this.blendDst;a.blendEquation=this.blendEquation;a.depthTest=this.depthTest;a.depthWrite=this.depthWrite;a.polygonOffset=this.polygonOffset;a.polygonOffsetFactor= +this.polygonOffsetFactor;a.polygonOffsetUnits=this.polygonOffsetUnits;a.alphaTest=this.alphaTest;a.overdraw=this.overdraw;a.visible=this.visible;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Material.prototype);THREE.MaterialIdCount=0; +THREE.LineBasicMaterial=function(a){THREE.Material.call(this);this.type="LineBasicMaterial";this.color=new THREE.Color(16777215);this.linewidth=1;this.linejoin=this.linecap="round";this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.LineBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineBasicMaterial.prototype.clone=function(){var a=new THREE.LineBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.linecap=this.linecap;a.linejoin=this.linejoin;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.LineDashedMaterial=function(a){THREE.Material.call(this);this.type="LineDashedMaterial";this.color=new THREE.Color(16777215);this.scale=this.linewidth=1;this.dashSize=3;this.gapSize=1;this.vertexColors=!1;this.fog=!0;this.setValues(a)};THREE.LineDashedMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineDashedMaterial.prototype.clone=function(){var a=new THREE.LineDashedMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.scale=this.scale;a.dashSize=this.dashSize;a.gapSize=this.gapSize;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.MeshBasicMaterial=function(a){THREE.Material.call(this);this.type="MeshBasicMaterial";this.color=new THREE.Color(16777215);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphTargets=this.skinning=!1;this.setValues(a)}; +THREE.MeshBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshBasicMaterial.prototype.clone=function(){var a=new THREE.MeshBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap; +a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;return a}; +THREE.MeshLambertMaterial=function(a){THREE.Material.call(this);this.type="MeshLambertMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.wrapAround=!1;this.wrapRGB=new 
THREE.Vector3(1,1,1);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth= +1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshLambertMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshLambertMaterial.prototype.clone=function(){var a=new THREE.MeshLambertMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog; +a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshPhongMaterial=function(a){THREE.Material.call(this);this.type="MeshPhongMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.specular=new THREE.Color(1118481);this.shininess=30;this.wrapAround=this.metal=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.bumpMap=this.lightMap=this.map=null;this.bumpScale=1;this.normalMap=null;this.normalScale=new THREE.Vector2(1,1);this.envMap=this.alphaMap=this.specularMap=null;this.combine= +THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshPhongMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshPhongMaterial.prototype.clone=function(){var a=new THREE.MeshPhongMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.specular.copy(this.specular);a.shininess=this.shininess;a.metal=this.metal;a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.bumpMap=this.bumpMap;a.bumpScale=this.bumpScale;a.normalMap=this.normalMap;a.normalScale.copy(this.normalScale); +a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshDepthMaterial=function(a){THREE.Material.call(this);this.type="MeshDepthMaterial";this.wireframe=this.morphTargets=!1;this.wireframeLinewidth=1;this.setValues(a)};THREE.MeshDepthMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshDepthMaterial.prototype.clone=function(){var a=new 
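+/* Editor's note: every material subclass in this stretch follows one pattern:
+   the constructor fills defaults, setValues(a) copies recognized keys from the
+   parameter object, and clone() copies fields member by member. Sketch:
+     var mat = new THREE.MeshPhongMaterial({ color: 0xff0000, shininess: 30,
+                                             specular: 0x111111 });
+     var copy = mat.clone(); // copies colors by value, shares texture references
+*/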
THREE.MeshDepthMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a}; +THREE.MeshNormalMaterial=function(a){THREE.Material.call(this,a);this.type="MeshNormalMaterial";this.shading=THREE.FlatShading;this.wireframe=!1;this.wireframeLinewidth=1;this.morphTargets=!1;this.setValues(a)};THREE.MeshNormalMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshNormalMaterial.prototype.clone=function(){var a=new THREE.MeshNormalMaterial;THREE.Material.prototype.clone.call(this,a);a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshFaceMaterial=function(a){this.uuid=THREE.Math.generateUUID();this.type="MeshFaceMaterial";this.materials=a instanceof Array?a:[]}; +THREE.MeshFaceMaterial.prototype={constructor:THREE.MeshFaceMaterial,toJSON:function(){for(var a={metadata:{version:4.2,type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type,materials:[]},b=0,c=this.materials.length;bf)){var m=b.origin.distanceTo(n);md.far||e.push({distance:m,point:k.clone().applyMatrix4(this.matrixWorld),face:null,faceIndex:null,object:this})}}}();THREE.Line.prototype.clone=function(a){void 0===a&&(a=new THREE.Line(this.geometry,this.material,this.mode));THREE.Object3D.prototype.clone.call(this,a);return a}; +THREE.Mesh=function(a,b){THREE.Object3D.call(this);this.type="Mesh";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.MeshBasicMaterial({color:16777215*Math.random()});this.updateMorphTargets()};THREE.Mesh.prototype=Object.create(THREE.Object3D.prototype); +THREE.Mesh.prototype.updateMorphTargets=function(){if(void 0!==this.geometry.morphTargets&&0g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this})}}}else for(s=p.position.array,t=k=0,w=s.length;k +g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this}))}}else if(k instanceof THREE.Geometry)for(t=this.material instanceof THREE.MeshFaceMaterial,s=!0===t?this.material.materials:null,r=g.precision,u=k.vertices,v=0,y=k.faces.length;vg.far||h.push({distance:x,point:K,face:G,faceIndex:v,object:this}))}}}();THREE.Mesh.prototype.clone=function(a,b){void 0===a&&(a=new THREE.Mesh(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a,b);return a};THREE.Bone=function(a){THREE.Object3D.call(this);this.skin=a};THREE.Bone.prototype=Object.create(THREE.Object3D.prototype); +THREE.Skeleton=function(a,b,c){this.useVertexTexture=void 0!==c?c:!0;this.identityMatrix=new THREE.Matrix4;a=a||[];this.bones=a.slice(0);this.useVertexTexture?(this.boneTextureHeight=this.boneTextureWidth=a=256h.end&&(h.end=e);b||(b=g)}}a.firstAnimation=b}; +THREE.MorphAnimMesh.prototype.setAnimationLabel=function(a,b,c){this.geometry.animations||(this.geometry.animations={});this.geometry.animations[a]={start:b,end:c}};THREE.MorphAnimMesh.prototype.playAnimation=function(a,b){var c=this.geometry.animations[a];c?(this.setFrameRange(c.start,c.end),this.duration=(c.end-c.start)/b*1E3,this.time=0):console.warn("animation["+a+"] undefined")}; +THREE.MorphAnimMesh.prototype.updateAnimation=function(a){var 
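+/* Editor's note: THREE.MeshFaceMaterial above is just an array wrapper; at
+   render time each face selects materials[face.materialIndex]. Sketch:
+     var mats = new THREE.MeshFaceMaterial([
+       new THREE.MeshBasicMaterial({ color: 0xff0000 }),
+       new THREE.MeshBasicMaterial({ color: 0x00ff00 })
+     ]); // faces with materialIndex 0 or 1 pick between the two entries
+*/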
b=this.duration/this.length;this.time+=this.direction*a;if(this.mirroredLoop){if(this.time>this.duration||0>this.time)this.direction*=-1,this.time>this.duration&&(this.time=this.duration,this.directionBackwards=!0),0>this.time&&(this.time=0,this.directionBackwards=!1)}else this.time%=this.duration,0>this.time&&(this.time+=this.duration);a=this.startKeyframe+THREE.Math.clamp(Math.floor(this.time/b),0,this.length-1);a!==this.currentKeyframe&& +(this.morphTargetInfluences[this.lastKeyframe]=0,this.morphTargetInfluences[this.currentKeyframe]=1,this.morphTargetInfluences[a]=0,this.lastKeyframe=this.currentKeyframe,this.currentKeyframe=a);b=this.time%b/b;this.directionBackwards&&(b=1-b);this.morphTargetInfluences[this.currentKeyframe]=b;this.morphTargetInfluences[this.lastKeyframe]=1-b}; +THREE.MorphAnimMesh.prototype.interpolateTargets=function(a,b,c){for(var d=this.morphTargetInfluences,e=0,f=d.length;e=this.objects[d].distance)this.objects[d-1].object.visible=!1,this.objects[d].object.visible=!0;else break;for(;dthis.scale.x||c.push({distance:d,point:this.position,face:null,object:this})}}();THREE.Sprite.prototype.clone=function(a){void 0===a&&(a=new THREE.Sprite(this.material));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.Particle=THREE.Sprite; +THREE.LensFlare=function(a,b,c,d,e){THREE.Object3D.call(this);this.lensFlares=[];this.positionScreen=new THREE.Vector3;this.customUpdateCallback=void 0;void 0!==a&&this.add(a,b,c,d,e)};THREE.LensFlare.prototype=Object.create(THREE.Object3D.prototype); +THREE.LensFlare.prototype.add=function(a,b,c,d,e,f){void 0===b&&(b=-1);void 0===c&&(c=0);void 0===f&&(f=1);void 0===e&&(e=new THREE.Color(16777215));void 0===d&&(d=THREE.NormalBlending);c=Math.min(c,Math.max(0,c));this.lensFlares.push({texture:a,size:b,distance:c,x:0,y:0,z:0,scale:1,rotation:1,opacity:f,color:e,blending:d})}; +THREE.LensFlare.prototype.updateLensFlares=function(){var a,b=this.lensFlares.length,c,d=2*-this.positionScreen.x,e=2*-this.positionScreen.y;for(a=0;a dashSize ) {\n\t\tdiscard;\n\t}\n\tgl_FragColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.fog_fragment, +"}"].join("\n")},depth:{uniforms:{mNear:{type:"f",value:1},mFar:{type:"f",value:2E3},opacity:{type:"f",value:1}},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float mNear;\nuniform float mFar;\nuniform float opacity;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment, +"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tfloat depth = gl_FragDepthEXT / gl_FragCoord.w;\n\t#else\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\t#endif\n\tfloat color = 1.0 - smoothstep( mNear, mFar, depth );\n\tgl_FragColor = vec4( vec3( color ), opacity );\n}"].join("\n")},normal:{uniforms:{opacity:{type:"f",value:1}},vertexShader:["varying vec3 vNormal;",THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvNormal = normalize( normalMatrix * normal );", +THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float opacity;\nvarying vec3 vNormal;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = vec4( 0.5 * normalize( vNormal ) + 0.5, opacity 
);",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},normalmap:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{enableAO:{type:"i", +value:0},enableDiffuse:{type:"i",value:0},enableSpecular:{type:"i",value:0},enableReflection:{type:"i",value:0},enableDisplacement:{type:"i",value:0},tDisplacement:{type:"t",value:null},tDiffuse:{type:"t",value:null},tCube:{type:"t",value:null},tNormal:{type:"t",value:null},tSpecular:{type:"t",value:null},tAO:{type:"t",value:null},uNormalScale:{type:"v2",value:new THREE.Vector2(1,1)},uDisplacementBias:{type:"f",value:0},uDisplacementScale:{type:"f",value:1},diffuse:{type:"c",value:new THREE.Color(16777215)}, +specular:{type:"c",value:new THREE.Color(1118481)},ambient:{type:"c",value:new THREE.Color(16777215)},shininess:{type:"f",value:30},opacity:{type:"f",value:1},useRefract:{type:"i",value:0},refractionRatio:{type:"f",value:.98},reflectivity:{type:"f",value:.5},uOffset:{type:"v2",value:new THREE.Vector2(0,0)},uRepeat:{type:"v2",value:new THREE.Vector2(1,1)},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),fragmentShader:["uniform vec3 ambient;\nuniform vec3 diffuse;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\nuniform bool enableDiffuse;\nuniform bool enableSpecular;\nuniform bool enableAO;\nuniform bool enableReflection;\nuniform sampler2D tDiffuse;\nuniform sampler2D tNormal;\nuniform sampler2D tSpecular;\nuniform sampler2D tAO;\nuniform samplerCube tCube;\nuniform vec2 uNormalScale;\nuniform bool useRefract;\nuniform float refractionRatio;\nuniform float reflectivity;\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nuniform vec3 ambientLightColor;\n#if MAX_DIR_LIGHTS > 0\n\tuniform vec3 directionalLightColor[ MAX_DIR_LIGHTS ];\n\tuniform vec3 directionalLightDirection[ MAX_DIR_LIGHTS ];\n#endif\n#if MAX_HEMI_LIGHTS > 0\n\tuniform vec3 hemisphereLightSkyColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightGroundColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightDirection[ MAX_HEMI_LIGHTS ];\n#endif\n#if MAX_POINT_LIGHTS > 0\n\tuniform vec3 pointLightColor[ MAX_POINT_LIGHTS ];\n\tuniform vec3 pointLightPosition[ MAX_POINT_LIGHTS ];\n\tuniform float pointLightDistance[ MAX_POINT_LIGHTS ];\n#endif\n#if MAX_SPOT_LIGHTS > 0\n\tuniform vec3 spotLightColor[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightPosition[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightDirection[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightAngleCos[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightExponent[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightDistance[ MAX_SPOT_LIGHTS ];\n#endif\n#ifdef WRAP_AROUND\n\tuniform vec3 wrapRGB;\n#endif\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment,"\tgl_FragColor = vec4( vec3( 1.0 ), opacity );\n\tvec3 specularTex = vec3( 1.0 );\n\tvec3 normalTex = texture2D( tNormal, vUv ).xyz * 2.0 - 1.0;\n\tnormalTex.xy *= uNormalScale;\n\tnormalTex = normalize( normalTex );\n\tif( enableDiffuse ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 texelColor = texture2D( tDiffuse, vUv );\n\t\t\ttexelColor.xyz *= texelColor.xyz;\n\t\t\tgl_FragColor = gl_FragColor * texelColor;\n\t\t#else\n\t\t\tgl_FragColor = gl_FragColor * texture2D( tDiffuse, vUv );\n\t\t#endif\n\t}\n\tif( enableAO ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 aoColor = 
texture2D( tAO, vUv );\n\t\t\taoColor.xyz *= aoColor.xyz;\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * aoColor.xyz;\n\t\t#else\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * texture2D( tAO, vUv ).xyz;\n\t\t#endif\n\t}", +THREE.ShaderChunk.alphatest_fragment,"\tif( enableSpecular )\n\t\tspecularTex = texture2D( tSpecular, vUv ).xyz;\n\tmat3 tsb = mat3( normalize( vTangent ), normalize( vBinormal ), normalize( vNormal ) );\n\tvec3 finalNormal = tsb * normalTex;\n\t#ifdef FLIP_SIDED\n\t\tfinalNormal = -finalNormal;\n\t#endif\n\tvec3 normal = normalize( finalNormal );\n\tvec3 viewPosition = normalize( vViewPosition );\n\t#if MAX_POINT_LIGHTS > 0\n\t\tvec3 pointDiffuse = vec3( 0.0 );\n\t\tvec3 pointSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_POINT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( pointLightPosition[ i ], 1.0 );\n\t\t\tvec3 pointVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat pointDistance = 1.0;\n\t\t\tif ( pointLightDistance[ i ] > 0.0 )\n\t\t\t\tpointDistance = 1.0 - min( ( length( pointVector ) / pointLightDistance[ i ] ), 1.0 );\n\t\t\tpointVector = normalize( pointVector );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat pointDiffuseWeightFull = max( dot( normal, pointVector ), 0.0 );\n\t\t\t\tfloat pointDiffuseWeightHalf = max( 0.5 * dot( normal, pointVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 pointDiffuseWeight = mix( vec3( pointDiffuseWeightFull ), vec3( pointDiffuseWeightHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat pointDiffuseWeight = max( dot( normal, pointVector ), 0.0 );\n\t\t\t#endif\n\t\t\tpointDiffuse += pointDistance * pointLightColor[ i ] * diffuse * pointDiffuseWeight;\n\t\t\tvec3 pointHalfVector = normalize( pointVector + viewPosition );\n\t\t\tfloat pointDotNormalHalf = max( dot( normal, pointHalfVector ), 0.0 );\n\t\t\tfloat pointSpecularWeight = specularTex.r * max( pow( pointDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( pointVector, pointHalfVector ), 0.0 ), 5.0 );\n\t\t\tpointSpecular += schlick * pointLightColor[ i ] * pointSpecularWeight * pointDiffuseWeight * pointDistance * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\tvec3 spotDiffuse = vec3( 0.0 );\n\t\tvec3 spotSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_SPOT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( spotLightPosition[ i ], 1.0 );\n\t\t\tvec3 spotVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat spotDistance = 1.0;\n\t\t\tif ( spotLightDistance[ i ] > 0.0 )\n\t\t\t\tspotDistance = 1.0 - min( ( length( spotVector ) / spotLightDistance[ i ] ), 1.0 );\n\t\t\tspotVector = normalize( spotVector );\n\t\t\tfloat spotEffect = dot( spotLightDirection[ i ], normalize( spotLightPosition[ i ] - vWorldPosition ) );\n\t\t\tif ( spotEffect > spotLightAngleCos[ i ] ) {\n\t\t\t\tspotEffect = max( pow( max( spotEffect, 0.0 ), spotLightExponent[ i ] ), 0.0 );\n\t\t\t\t#ifdef WRAP_AROUND\n\t\t\t\t\tfloat spotDiffuseWeightFull = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t\tfloat spotDiffuseWeightHalf = max( 0.5 * dot( normal, spotVector ) + 0.5, 0.0 );\n\t\t\t\t\tvec3 spotDiffuseWeight = mix( vec3( spotDiffuseWeightFull ), vec3( spotDiffuseWeightHalf ), wrapRGB );\n\t\t\t\t#else\n\t\t\t\t\tfloat spotDiffuseWeight = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t#endif\n\t\t\t\tspotDiffuse += spotDistance * spotLightColor[ i ] * diffuse * spotDiffuseWeight * spotEffect;\n\t\t\t\tvec3 spotHalfVector = 
normalize( spotVector + viewPosition );\n\t\t\t\tfloat spotDotNormalHalf = max( dot( normal, spotHalfVector ), 0.0 );\n\t\t\t\tfloat spotSpecularWeight = specularTex.r * max( pow( spotDotNormalHalf, shininess ), 0.0 );\n\t\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( spotVector, spotHalfVector ), 0.0 ), 5.0 );\n\t\t\t\tspotSpecular += schlick * spotLightColor[ i ] * spotSpecularWeight * spotDiffuseWeight * spotDistance * specularNormalization * spotEffect;\n\t\t\t}\n\t\t}\n\t#endif\n\t#if MAX_DIR_LIGHTS > 0\n\t\tvec3 dirDiffuse = vec3( 0.0 );\n\t\tvec3 dirSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_DIR_LIGHTS; i++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( directionalLightDirection[ i ], 0.0 );\n\t\t\tvec3 dirVector = normalize( lDirection.xyz );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat directionalLightWeightingFull = max( dot( normal, dirVector ), 0.0 );\n\t\t\t\tfloat directionalLightWeightingHalf = max( 0.5 * dot( normal, dirVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 dirDiffuseWeight = mix( vec3( directionalLightWeightingFull ), vec3( directionalLightWeightingHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat dirDiffuseWeight = max( dot( normal, dirVector ), 0.0 );\n\t\t\t#endif\n\t\t\tdirDiffuse += directionalLightColor[ i ] * diffuse * dirDiffuseWeight;\n\t\t\tvec3 dirHalfVector = normalize( dirVector + viewPosition );\n\t\t\tfloat dirDotNormalHalf = max( dot( normal, dirHalfVector ), 0.0 );\n\t\t\tfloat dirSpecularWeight = specularTex.r * max( pow( dirDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( dirVector, dirHalfVector ), 0.0 ), 5.0 );\n\t\t\tdirSpecular += schlick * directionalLightColor[ i ] * dirSpecularWeight * dirDiffuseWeight * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\tvec3 hemiDiffuse = vec3( 0.0 );\n\t\tvec3 hemiSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_HEMI_LIGHTS; i ++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( hemisphereLightDirection[ i ], 0.0 );\n\t\t\tvec3 lVector = normalize( lDirection.xyz );\n\t\t\tfloat dotProduct = dot( normal, lVector );\n\t\t\tfloat hemiDiffuseWeight = 0.5 * dotProduct + 0.5;\n\t\t\tvec3 hemiColor = mix( hemisphereLightGroundColor[ i ], hemisphereLightSkyColor[ i ], hemiDiffuseWeight );\n\t\t\themiDiffuse += diffuse * hemiColor;\n\t\t\tvec3 hemiHalfVectorSky = normalize( lVector + viewPosition );\n\t\t\tfloat hemiDotNormalHalfSky = 0.5 * dot( normal, hemiHalfVectorSky ) + 0.5;\n\t\t\tfloat hemiSpecularWeightSky = specularTex.r * max( pow( max( hemiDotNormalHalfSky, 0.0 ), shininess ), 0.0 );\n\t\t\tvec3 lVectorGround = -lVector;\n\t\t\tvec3 hemiHalfVectorGround = normalize( lVectorGround + viewPosition );\n\t\t\tfloat hemiDotNormalHalfGround = 0.5 * dot( normal, hemiHalfVectorGround ) + 0.5;\n\t\t\tfloat hemiSpecularWeightGround = specularTex.r * max( pow( max( hemiDotNormalHalfGround, 0.0 ), shininess ), 0.0 );\n\t\t\tfloat dotProductGround = dot( normal, lVectorGround );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlickSky = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVector, hemiHalfVectorSky ), 0.0 ), 5.0 );\n\t\t\tvec3 schlickGround = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVectorGround, hemiHalfVectorGround ), 0.0 ), 5.0 );\n\t\t\themiSpecular += hemiColor * specularNormalization * ( schlickSky * 
hemiSpecularWeightSky * max( dotProduct, 0.0 ) + schlickGround * hemiSpecularWeightGround * max( dotProductGround, 0.0 ) );\n\t\t}\n\t#endif\n\tvec3 totalDiffuse = vec3( 0.0 );\n\tvec3 totalSpecular = vec3( 0.0 );\n\t#if MAX_DIR_LIGHTS > 0\n\t\ttotalDiffuse += dirDiffuse;\n\t\ttotalSpecular += dirSpecular;\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\ttotalDiffuse += hemiDiffuse;\n\t\ttotalSpecular += hemiSpecular;\n\t#endif\n\t#if MAX_POINT_LIGHTS > 0\n\t\ttotalDiffuse += pointDiffuse;\n\t\ttotalSpecular += pointSpecular;\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\ttotalDiffuse += spotDiffuse;\n\t\ttotalSpecular += spotSpecular;\n\t#endif\n\t#ifdef METAL\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient + totalSpecular );\n\t#else\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient ) + totalSpecular;\n\t#endif\n\tif ( enableReflection ) {\n\t\tvec3 vReflect;\n\t\tvec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );\n\t\tif ( useRefract ) {\n\t\t\tvReflect = refract( cameraToVertex, normal, refractionRatio );\n\t\t} else {\n\t\t\tvReflect = reflect( cameraToVertex, normal );\n\t\t}\n\t\tvec4 cubeColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );\n\t\t#ifdef GAMMA_INPUT\n\t\t\tcubeColor.xyz *= cubeColor.xyz;\n\t\t#endif\n\t\tgl_FragColor.xyz = mix( gl_FragColor.xyz, cubeColor.xyz, specularTex.r * reflectivity );\n\t}", +THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fragment,"}"].join("\n"),vertexShader:["attribute vec4 tangent;\nuniform vec2 uOffset;\nuniform vec2 uRepeat;\nuniform bool enableDisplacement;\n#ifdef VERTEX_TEXTURES\n\tuniform sampler2D tDisplacement;\n\tuniform float uDisplacementScale;\n\tuniform float uDisplacementBias;\n#endif\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.skinnormal_vertex,"\t#ifdef USE_SKINNING\n\t\tvNormal = normalize( normalMatrix * skinnedNormal.xyz );\n\t\tvec4 skinnedTangent = skinMatrix * vec4( tangent.xyz, 0.0 );\n\t\tvTangent = normalize( normalMatrix * skinnedTangent.xyz );\n\t#else\n\t\tvNormal = normalize( normalMatrix * normal );\n\t\tvTangent = normalize( normalMatrix * tangent.xyz );\n\t#endif\n\tvBinormal = normalize( cross( vNormal, vTangent ) * tangent.w );\n\tvUv = uv * uRepeat + uOffset;\n\tvec3 displacedPosition;\n\t#ifdef VERTEX_TEXTURES\n\t\tif ( enableDisplacement ) {\n\t\t\tvec3 dv = texture2D( tDisplacement, uv ).xyz;\n\t\t\tfloat df = uDisplacementScale * dv.x + uDisplacementBias;\n\t\t\tdisplacedPosition = position + normalize( normal ) * df;\n\t\t} else {\n\t\t\t#ifdef USE_SKINNING\n\t\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\t\tdisplacedPosition = skinned.xyz;\n\t\t\t#else\n\t\t\t\tdisplacedPosition = position;\n\t\t\t#endif\n\t\t}\n\t#else\n\t\t#ifdef USE_SKINNING\n\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\tvec4 skinned = vec4( 
0.0 );\n\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\tdisplacedPosition = skinned.xyz;\n\t\t#else\n\t\t\tdisplacedPosition = position;\n\t\t#endif\n\t#endif\n\tvec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );\n\tvec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );\n\tgl_Position = projectionMatrix * mvPosition;", +THREE.ShaderChunk.logdepthbuf_vertex,"\tvWorldPosition = worldPosition.xyz;\n\tvViewPosition = -mvPosition.xyz;\n\t#ifdef USE_SHADOWMAP\n\t\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\t\t\tvShadowCoord[ i ] = shadowMatrix[ i ] * worldPosition;\n\t\t}\n\t#endif\n}"].join("\n")},cube:{uniforms:{tCube:{type:"t",value:null},tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvec4 worldPosition = modelMatrix * vec4( position, 1.0 );\n\tvWorldPosition = worldPosition.xyz;\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", +THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform samplerCube tCube;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},depthRGBA:{uniforms:{},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex, +"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:[THREE.ShaderChunk.logdepthbuf_pars_fragment,"vec4 pack_depth( const in float depth ) {\n\tconst vec4 bit_shift = vec4( 256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0 );\n\tconst vec4 bit_mask = vec4( 0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0 );\n\tvec4 res = mod( depth * bit_shift * vec4( 255 ), vec4( 256 ) ) / vec4( 255 );\n\tres -= res.xxyz * bit_mask;\n\treturn res;\n}\nvoid main() {", +THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragDepthEXT );\n\t#else\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );\n\t#endif\n}"].join("\n")}}; +THREE.WebGLRenderer=function(a){function b(a){var b=a.geometry;a=a.material;var c=b.vertices.length;if(a.attributes){void 0===b.__webglCustomAttributesList&&(b.__webglCustomAttributesList=[]);for(var d in a.attributes){var e=a.attributes[d];if(!e.__webglInitialized||e.createUniqueBuffers){e.__webglInitialized=!0;var f=1;"v2"===e.type?f=2:"v3"===e.type?f=3:"v4"===e.type?f=4:"c"===e.type&&(f=3);e.size=f;e.array=new Float32Array(c*f);e.buffer=l.createBuffer();e.buffer.belongsToAttribute=d;e.needsUpdate= +!0}b.__webglCustomAttributesList.push(e)}}}function c(a,b){var c=b.geometry,e=a.faces3,f=3*e.length,g=1*e.length,h=3*e.length,e=d(b,a);a.__vertexArray=new Float32Array(3*f);a.__normalArray=new Float32Array(3*f);a.__colorArray=new Float32Array(3*f);a.__uvArray=new Float32Array(2*f);1Aa;Aa++)Cb=ma[Aa],Ta[Sa]=Cb.x,Ta[Sa+1]=Cb.y,Ta[Sa+2]=Cb.z,Sa+=3;else 
for(Aa=0;3>Aa;Aa++)Ta[Sa]=pa.x,Ta[Sa+1]=pa.y,Ta[Sa+2]=pa.z,Sa+=3;l.bindBuffer(l.ARRAY_BUFFER,C.__webglNormalBuffer);l.bufferData(l.ARRAY_BUFFER, +Ta,S)}if(Kc&&ua){M=0;for(ea=N.length;MAa;Aa++)Oa=hb[Aa],sb[qb]=Oa.x,sb[qb+1]=Oa.y,qb+=2;0Aa;Aa++)Qb=za[Aa],fb[rb]=Qb.x,fb[rb+1]=Qb.y,rb+=2;0h&&(f[v].counter+=1,k=f[v].hash+"_"+f[v].counter,k in r||(p={id:rc++, +faces3:[],materialIndex:v,vertices:0,numMorphTargets:m,numMorphNormals:n},r[k]=p,q.push(p)));r[k].faces3.push(t);r[k].vertices+=3}a[g]=q;d.groupsNeedUpdate=!1}a=xb[d.id];g=0;for(e=a.length;gDa;Da++)kb[Da]=!J.autoScaleCubemaps||Ob||Tb?Tb?ua.image[Da].image:ua.image[Da]:R(ua.image[Da],$c);var ka=kb[0],Zb=THREE.Math.isPowerOfTwo(ka.width)&&THREE.Math.isPowerOfTwo(ka.height),ab=Q(ua.format),Fb=Q(ua.type);F(l.TEXTURE_CUBE_MAP,ua,Zb);for(Da=0;6>Da;Da++)if(Ob)for(var gb,$b=kb[Da].mipmaps,ga=0,Xb=$b.length;ga=Oc&&console.warn("WebGLRenderer: trying to use "+a+" texture units while this GPU supports only "+ +Oc);dc+=1;return a}function x(a,b){a._modelViewMatrix.multiplyMatrices(b.matrixWorldInverse,a.matrixWorld);a._normalMatrix.getNormalMatrix(a._modelViewMatrix)}function D(a,b,c,d){a[b]=c.r*c.r*d;a[b+1]=c.g*c.g*d;a[b+2]=c.b*c.b*d}function E(a,b,c,d){a[b]=c.r*d;a[b+1]=c.g*d;a[b+2]=c.b*d}function A(a){a!==Pc&&(l.lineWidth(a),Pc=a)}function B(a,b,c){Qc!==a&&(a?l.enable(l.POLYGON_OFFSET_FILL):l.disable(l.POLYGON_OFFSET_FILL),Qc=a);!a||Rc===b&&Sc===c||(l.polygonOffset(b,c),Rc=b,Sc=c)}function F(a,b,c){c? +(l.texParameteri(a,l.TEXTURE_WRAP_S,Q(b.wrapS)),l.texParameteri(a,l.TEXTURE_WRAP_T,Q(b.wrapT)),l.texParameteri(a,l.TEXTURE_MAG_FILTER,Q(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,Q(b.minFilter))):(l.texParameteri(a,l.TEXTURE_WRAP_S,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_WRAP_T,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_MAG_FILTER,T(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,T(b.minFilter)));(c=pa.get("EXT_texture_filter_anisotropic"))&&b.type!==THREE.FloatType&&(1b||a.height>b){var c=b/Math.max(a.width,a.height),d=document.createElement("canvas");d.width=Math.floor(a.width*c);d.height=Math.floor(a.height*c);d.getContext("2d").drawImage(a,0,0,a.width,a.height,0,0,d.width,d.height);console.log("THREE.WebGLRenderer:",a,"is too big ("+a.width+"x"+a.height+"). 
Resized to "+d.width+"x"+d.height+ +".");return d}return a}function H(a,b){l.bindRenderbuffer(l.RENDERBUFFER,a);b.depthBuffer&&!b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_COMPONENT16,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT,l.RENDERBUFFER,a)):b.depthBuffer&&b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_STENCIL,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a)):l.renderbufferStorage(l.RENDERBUFFER,l.RGBA4,b.width, +b.height)}function C(a){a instanceof THREE.WebGLRenderTargetCube?(l.bindTexture(l.TEXTURE_CUBE_MAP,a.__webglTexture),l.generateMipmap(l.TEXTURE_CUBE_MAP),l.bindTexture(l.TEXTURE_CUBE_MAP,null)):(l.bindTexture(l.TEXTURE_2D,a.__webglTexture),l.generateMipmap(l.TEXTURE_2D),l.bindTexture(l.TEXTURE_2D,null))}function T(a){return a===THREE.NearestFilter||a===THREE.NearestMipMapNearestFilter||a===THREE.NearestMipMapLinearFilter?l.NEAREST:l.LINEAR}function Q(a){var b;if(a===THREE.RepeatWrapping)return l.REPEAT; +if(a===THREE.ClampToEdgeWrapping)return l.CLAMP_TO_EDGE;if(a===THREE.MirroredRepeatWrapping)return l.MIRRORED_REPEAT;if(a===THREE.NearestFilter)return l.NEAREST;if(a===THREE.NearestMipMapNearestFilter)return l.NEAREST_MIPMAP_NEAREST;if(a===THREE.NearestMipMapLinearFilter)return l.NEAREST_MIPMAP_LINEAR;if(a===THREE.LinearFilter)return l.LINEAR;if(a===THREE.LinearMipMapNearestFilter)return l.LINEAR_MIPMAP_NEAREST;if(a===THREE.LinearMipMapLinearFilter)return l.LINEAR_MIPMAP_LINEAR;if(a===THREE.UnsignedByteType)return l.UNSIGNED_BYTE; +if(a===THREE.UnsignedShort4444Type)return l.UNSIGNED_SHORT_4_4_4_4;if(a===THREE.UnsignedShort5551Type)return l.UNSIGNED_SHORT_5_5_5_1;if(a===THREE.UnsignedShort565Type)return l.UNSIGNED_SHORT_5_6_5;if(a===THREE.ByteType)return l.BYTE;if(a===THREE.ShortType)return l.SHORT;if(a===THREE.UnsignedShortType)return l.UNSIGNED_SHORT;if(a===THREE.IntType)return l.INT;if(a===THREE.UnsignedIntType)return l.UNSIGNED_INT;if(a===THREE.FloatType)return l.FLOAT;if(a===THREE.AlphaFormat)return l.ALPHA;if(a===THREE.RGBFormat)return l.RGB; +if(a===THREE.RGBAFormat)return l.RGBA;if(a===THREE.LuminanceFormat)return l.LUMINANCE;if(a===THREE.LuminanceAlphaFormat)return l.LUMINANCE_ALPHA;if(a===THREE.AddEquation)return l.FUNC_ADD;if(a===THREE.SubtractEquation)return l.FUNC_SUBTRACT;if(a===THREE.ReverseSubtractEquation)return l.FUNC_REVERSE_SUBTRACT;if(a===THREE.ZeroFactor)return l.ZERO;if(a===THREE.OneFactor)return l.ONE;if(a===THREE.SrcColorFactor)return l.SRC_COLOR;if(a===THREE.OneMinusSrcColorFactor)return l.ONE_MINUS_SRC_COLOR;if(a=== +THREE.SrcAlphaFactor)return l.SRC_ALPHA;if(a===THREE.OneMinusSrcAlphaFactor)return l.ONE_MINUS_SRC_ALPHA;if(a===THREE.DstAlphaFactor)return l.DST_ALPHA;if(a===THREE.OneMinusDstAlphaFactor)return l.ONE_MINUS_DST_ALPHA;if(a===THREE.DstColorFactor)return l.DST_COLOR;if(a===THREE.OneMinusDstColorFactor)return l.ONE_MINUS_DST_COLOR;if(a===THREE.SrcAlphaSaturateFactor)return l.SRC_ALPHA_SATURATE;b=pa.get("WEBGL_compressed_texture_s3tc");if(null!==b){if(a===THREE.RGB_S3TC_DXT1_Format)return b.COMPRESSED_RGB_S3TC_DXT1_EXT; +if(a===THREE.RGBA_S3TC_DXT1_Format)return b.COMPRESSED_RGBA_S3TC_DXT1_EXT;if(a===THREE.RGBA_S3TC_DXT3_Format)return b.COMPRESSED_RGBA_S3TC_DXT3_EXT;if(a===THREE.RGBA_S3TC_DXT5_Format)return b.COMPRESSED_RGBA_S3TC_DXT5_EXT}b=pa.get("WEBGL_compressed_texture_pvrtc");if(null!==b){if(a===THREE.RGB_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;if(a===THREE.RGB_PVRTC_2BPPV1_Format)return 
b.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;if(a===THREE.RGBA_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG; +if(a===THREE.RGBA_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG}b=pa.get("EXT_blend_minmax");if(null!==b){if(a===THREE.MinEquation)return b.MIN_EXT;if(a===THREE.MaxEquation)return b.MAX_EXT}return 0}console.log("THREE.WebGLRenderer",THREE.REVISION);a=a||{};var O=void 0!==a.canvas?a.canvas:document.createElement("canvas"),S=void 0!==a.context?a.context:null,X=void 0!==a.precision?a.precision:"highp",Y=void 0!==a.alpha?a.alpha:!1,la=void 0!==a.depth?a.depth:!0,ma=void 0!==a.stencil? +a.stencil:!0,ya=void 0!==a.antialias?a.antialias:!1,P=void 0!==a.premultipliedAlpha?a.premultipliedAlpha:!0,Ga=void 0!==a.preserveDrawingBuffer?a.preserveDrawingBuffer:!1,Fa=void 0!==a.logarithmicDepthBuffer?a.logarithmicDepthBuffer:!1,za=new THREE.Color(0),bb=0,cb=[],ob={},jb=[],Jb=[],Ib=[],yb=[],Ra=[];this.domElement=O;this.context=null;this.devicePixelRatio=void 0!==a.devicePixelRatio?a.devicePixelRatio:void 0!==self.devicePixelRatio?self.devicePixelRatio:1;this.sortObjects=this.autoClearStencil= +this.autoClearDepth=this.autoClearColor=this.autoClear=!0;this.shadowMapEnabled=this.gammaOutput=this.gammaInput=!1;this.shadowMapType=THREE.PCFShadowMap;this.shadowMapCullFace=THREE.CullFaceFront;this.shadowMapCascade=this.shadowMapDebug=!1;this.maxMorphTargets=8;this.maxMorphNormals=4;this.autoScaleCubemaps=!0;this.info={memory:{programs:0,geometries:0,textures:0},render:{calls:0,vertices:0,faces:0,points:0}};var J=this,hb=[],tc=null,Tc=null,Kb=-1,Oa=-1,ec=null,dc=0,Lb=-1,Mb=-1,pb=-1,Nb=-1,Ob=-1, +Xb=-1,Yb=-1,nb=-1,Qc=null,Rc=null,Sc=null,Pc=null,Pb=0,kc=0,lc=O.width,mc=O.height,Uc=0,Vc=0,wb=new Uint8Array(16),ib=new Uint8Array(16),Ec=new THREE.Frustum,Ac=new THREE.Matrix4,Gc=new THREE.Matrix4,Na=new THREE.Vector3,sa=new THREE.Vector3,fc=!0,Mc={ambient:[0,0,0],directional:{length:0,colors:[],positions:[]},point:{length:0,colors:[],positions:[],distances:[]},spot:{length:0,colors:[],positions:[],distances:[],directions:[],anglesCos:[],exponents:[]},hemi:{length:0,skyColors:[],groundColors:[], +positions:[]}},l;try{var Wc={alpha:Y,depth:la,stencil:ma,antialias:ya,premultipliedAlpha:P,preserveDrawingBuffer:Ga};l=S||O.getContext("webgl",Wc)||O.getContext("experimental-webgl",Wc);if(null===l){if(null!==O.getContext("webgl"))throw"Error creating WebGL context with your selected attributes.";throw"Error creating WebGL context.";}}catch(ad){console.error(ad)}void 0===l.getShaderPrecisionFormat&&(l.getShaderPrecisionFormat=function(){return{rangeMin:1,rangeMax:1,precision:1}});var pa=new THREE.WebGLExtensions(l); +pa.get("OES_texture_float");pa.get("OES_texture_float_linear");pa.get("OES_standard_derivatives");Fa&&pa.get("EXT_frag_depth");l.clearColor(0,0,0,1);l.clearDepth(1);l.clearStencil(0);l.enable(l.DEPTH_TEST);l.depthFunc(l.LEQUAL);l.frontFace(l.CCW);l.cullFace(l.BACK);l.enable(l.CULL_FACE);l.enable(l.BLEND);l.blendEquation(l.FUNC_ADD);l.blendFunc(l.SRC_ALPHA,l.ONE_MINUS_SRC_ALPHA);l.viewport(Pb,kc,lc,mc);l.clearColor(za.r,za.g,za.b,bb);this.context=l;var Oc=l.getParameter(l.MAX_TEXTURE_IMAGE_UNITS), +bd=l.getParameter(l.MAX_VERTEX_TEXTURE_IMAGE_UNITS),cd=l.getParameter(l.MAX_TEXTURE_SIZE),$c=l.getParameter(l.MAX_CUBE_MAP_TEXTURE_SIZE),sc=0b;b++)l.deleteFramebuffer(a.__webglFramebuffer[b]),l.deleteRenderbuffer(a.__webglRenderbuffer[b]); +else l.deleteFramebuffer(a.__webglFramebuffer),l.deleteRenderbuffer(a.__webglRenderbuffer);delete a.__webglFramebuffer;delete 
a.__webglRenderbuffer}J.info.memory.textures--},Dc=function(a){a=a.target;a.removeEventListener("dispose",Dc);Cc(a)},Yc=function(a){for(var b="__webglVertexBuffer __webglNormalBuffer __webglTangentBuffer __webglColorBuffer __webglUVBuffer __webglUV2Buffer __webglSkinIndicesBuffer __webglSkinWeightsBuffer __webglFaceBuffer __webglLineBuffer __webglLineDistanceBuffer".split(" "), +c=0,d=b.length;cd.numSupportedMorphTargets?(n.sort(p),n.length=d.numSupportedMorphTargets):n.length>d.numSupportedMorphNormals?n.sort(p):0===n.length&&n.push([0, +0]);for(m=0;mf;f++){a.__webglFramebuffer[f]=l.createFramebuffer();a.__webglRenderbuffer[f]=l.createRenderbuffer();l.texImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X+f,0,d,a.width,a.height,0,d,e,null);var g=a,h=l.TEXTURE_CUBE_MAP_POSITIVE_X+f;l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer[f]);l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,h,g.__webglTexture,0);H(a.__webglRenderbuffer[f],a)}c&&l.generateMipmap(l.TEXTURE_CUBE_MAP)}else a.__webglFramebuffer= +l.createFramebuffer(),a.__webglRenderbuffer=a.shareDepthFrom?a.shareDepthFrom.__webglRenderbuffer:l.createRenderbuffer(),l.bindTexture(l.TEXTURE_2D,a.__webglTexture),F(l.TEXTURE_2D,a,c),l.texImage2D(l.TEXTURE_2D,0,d,a.width,a.height,0,d,e,null),d=l.TEXTURE_2D,l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer),l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,d,a.__webglTexture,0),a.shareDepthFrom?a.depthBuffer&&!a.stencilBuffer?l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT, +l.RENDERBUFFER,a.__webglRenderbuffer):a.depthBuffer&&a.stencilBuffer&&l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a.__webglRenderbuffer):H(a.__webglRenderbuffer,a),c&&l.generateMipmap(l.TEXTURE_2D);b?l.bindTexture(l.TEXTURE_CUBE_MAP,null):l.bindTexture(l.TEXTURE_2D,null);l.bindRenderbuffer(l.RENDERBUFFER,null);l.bindFramebuffer(l.FRAMEBUFFER,null)}a?(b=b?a.__webglFramebuffer[a.activeCubeFace]:a.__webglFramebuffer,c=a.width,a=a.height,e=d=0):(b=null,c=lc,a=mc, +d=Pb,e=kc);b!==Tc&&(l.bindFramebuffer(l.FRAMEBUFFER,b),l.viewport(d,e,c,a),Tc=b);Uc=c;Vc=a};this.initMaterial=function(){console.warn("THREE.WebGLRenderer: .initMaterial() has been removed.")};this.addPrePlugin=function(){console.warn("THREE.WebGLRenderer: .addPrePlugin() has been removed.")};this.addPostPlugin=function(){console.warn("THREE.WebGLRenderer: .addPostPlugin() has been removed.")};this.updateShadowMap=function(){console.warn("THREE.WebGLRenderer: .updateShadowMap() has been removed.")}}; +THREE.WebGLRenderTarget=function(a,b,c){this.width=a;this.height=b;c=c||{};this.wrapS=void 0!==c.wrapS?c.wrapS:THREE.ClampToEdgeWrapping;this.wrapT=void 0!==c.wrapT?c.wrapT:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==c.magFilter?c.magFilter:THREE.LinearFilter;this.minFilter=void 0!==c.minFilter?c.minFilter:THREE.LinearMipMapLinearFilter;this.anisotropy=void 0!==c.anisotropy?c.anisotropy:1;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.format=void 0!==c.format?c.format: +THREE.RGBAFormat;this.type=void 0!==c.type?c.type:THREE.UnsignedByteType;this.depthBuffer=void 0!==c.depthBuffer?c.depthBuffer:!0;this.stencilBuffer=void 0!==c.stencilBuffer?c.stencilBuffer:!0;this.generateMipmaps=!0;this.shareDepthFrom=null}; +THREE.WebGLRenderTarget.prototype={constructor:THREE.WebGLRenderTarget,setSize:function(a,b){this.width=a;this.height=b},clone:function(){var a=new 
THREE.WebGLRenderTarget(this.width,this.height);a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.offset.copy(this.offset);a.repeat.copy(this.repeat);a.format=this.format;a.type=this.type;a.depthBuffer=this.depthBuffer;a.stencilBuffer=this.stencilBuffer;a.generateMipmaps=this.generateMipmaps; +a.shareDepthFrom=this.shareDepthFrom;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube=function(a,b,c){THREE.WebGLRenderTarget.call(this,a,b,c);this.activeCubeFace=0};THREE.WebGLRenderTargetCube.prototype=Object.create(THREE.WebGLRenderTarget.prototype); +THREE.WebGLExtensions=function(a){var b={};this.get=function(c){if(void 0!==b[c])return b[c];var d;switch(c){case "OES_texture_float":d=a.getExtension("OES_texture_float");break;case "OES_texture_float_linear":d=a.getExtension("OES_texture_float_linear");break;case "OES_standard_derivatives":d=a.getExtension("OES_standard_derivatives");break;case "EXT_texture_filter_anisotropic":d=a.getExtension("EXT_texture_filter_anisotropic")||a.getExtension("MOZ_EXT_texture_filter_anisotropic")||a.getExtension("WEBKIT_EXT_texture_filter_anisotropic"); +break;case "WEBGL_compressed_texture_s3tc":d=a.getExtension("WEBGL_compressed_texture_s3tc")||a.getExtension("MOZ_WEBGL_compressed_texture_s3tc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc");break;case "WEBGL_compressed_texture_pvrtc":d=a.getExtension("WEBGL_compressed_texture_pvrtc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc");break;case "OES_element_index_uint":d=a.getExtension("OES_element_index_uint");break;case "EXT_blend_minmax":d=a.getExtension("EXT_blend_minmax");break; +case "EXT_frag_depth":d=a.getExtension("EXT_frag_depth")}null===d&&console.log("THREE.WebGLRenderer: "+c+" extension not supported.");return b[c]=d}}; +THREE.WebGLProgram=function(){var a=0;return function(b,c,d,e){var f=b.context,g=d.defines,h=d.__webglShader.uniforms,k=d.attributes,n=d.__webglShader.vertexShader,p=d.__webglShader.fragmentShader,q=d.index0AttributeName;void 0===q&&!0===e.morphTargets&&(q="position");var m="SHADOWMAP_TYPE_BASIC";e.shadowMapType===THREE.PCFShadowMap?m="SHADOWMAP_TYPE_PCF":e.shadowMapType===THREE.PCFSoftShadowMap&&(m="SHADOWMAP_TYPE_PCF_SOFT");var r,t;r=[];for(var s in g)t=g[s],!1!==t&&(t="#define "+s+" "+t,r.push(t)); +r=r.join("\n");g=f.createProgram();d instanceof THREE.RawShaderMaterial?b=d="":(d=["precision "+e.precision+" float;","precision "+e.precision+" int;",r,e.supportsVertexTextures?"#define VERTEX_TEXTURES":"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"","#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,"#define MAX_BONES "+ +e.maxBones,e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.skinning?"#define USE_SKINNING":"",e.useVertexTexture?"#define BONE_TEXTURE":"",e.morphTargets?"#define USE_MORPHTARGETS":"",e.morphNormals?"#define USE_MORPHNORMALS":"",e.wrapAround?"#define WRAP_AROUND": +"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define 
FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.sizeAttenuation?"#define USE_SIZEATTENUATION":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 modelMatrix;\nuniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat4 viewMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\nattribute vec3 position;\nattribute vec3 normal;\nattribute vec2 uv;\nattribute vec2 uv2;\n#ifdef USE_COLOR\n\tattribute vec3 color;\n#endif\n#ifdef USE_MORPHTARGETS\n\tattribute vec3 morphTarget0;\n\tattribute vec3 morphTarget1;\n\tattribute vec3 morphTarget2;\n\tattribute vec3 morphTarget3;\n\t#ifdef USE_MORPHNORMALS\n\t\tattribute vec3 morphNormal0;\n\t\tattribute vec3 morphNormal1;\n\t\tattribute vec3 morphNormal2;\n\t\tattribute vec3 morphNormal3;\n\t#else\n\t\tattribute vec3 morphTarget4;\n\t\tattribute vec3 morphTarget5;\n\t\tattribute vec3 morphTarget6;\n\t\tattribute vec3 morphTarget7;\n\t#endif\n#endif\n#ifdef USE_SKINNING\n\tattribute vec4 skinIndex;\n\tattribute vec4 skinWeight;\n#endif\n"].join("\n"), +b=["precision "+e.precision+" float;","precision "+e.precision+" int;",e.bumpMap||e.normalMap?"#extension GL_OES_standard_derivatives : enable":"",r,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,e.alphaTest?"#define ALPHATEST "+e.alphaTest:"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"",e.useFog&&e.fog?"#define USE_FOG": +"",e.useFog&&e.fogExp?"#define FOG_EXP2":"",e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.metal?"#define METAL":"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP": +"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 viewMatrix;\nuniform vec3 cameraPosition;\n"].join("\n"));n=new THREE.WebGLShader(f,f.VERTEX_SHADER,d+n);p=new THREE.WebGLShader(f,f.FRAGMENT_SHADER,b+p);f.attachShader(g,n);f.attachShader(g,p);void 0!==q&&f.bindAttribLocation(g,0,q);f.linkProgram(g);!1===f.getProgramParameter(g,f.LINK_STATUS)&&(console.error("THREE.WebGLProgram: Could not initialise shader."), +console.error("gl.VALIDATE_STATUS",f.getProgramParameter(g,f.VALIDATE_STATUS)),console.error("gl.getError()",f.getError()));""!==f.getProgramInfoLog(g)&&console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",f.getProgramInfoLog(g));f.deleteShader(n);f.deleteShader(p);q="viewMatrix modelViewMatrix projectionMatrix normalMatrix modelMatrix cameraPosition morphTargetInfluences bindMatrix bindMatrixInverse".split(" ");e.useVertexTexture?(q.push("boneTexture"),q.push("boneTextureWidth"),q.push("boneTextureHeight")): +q.push("boneGlobalMatrices");e.logarithmicDepthBuffer&&q.push("logDepthBufFC");for(var u in h)q.push(u);h=q;u={};q=0;for(b=h.length;qT;T++)F[T]=new THREE.Vector3,A[T]=new 
THREE.Vector3;F=B.shadowCascadeNearZ[C];B=B.shadowCascadeFarZ[C];A[0].set(-1,-1,F);A[1].set(1,-1,F);A[2].set(-1,1,F);A[3].set(1,1,F);A[4].set(-1,-1,B);A[5].set(1,-1,B);A[6].set(-1,1,B);A[7].set(1,1,B);H.originalCamera=v;A=new THREE.Gyroscope;A.position.copy(x.shadowCascadeOffset);A.add(H);A.add(H.target);v.add(A);x.shadowCascadeArray[E]=H;console.log("Created virtualLight",H)}C= +x;F=E;B=C.shadowCascadeArray[F];B.position.copy(C.position);B.target.position.copy(C.target.position);B.lookAt(B.target);B.shadowCameraVisible=C.shadowCameraVisible;B.shadowDarkness=C.shadowDarkness;B.shadowBias=C.shadowCascadeBias[F];A=C.shadowCascadeNearZ[F];C=C.shadowCascadeFarZ[F];B=B.pointsFrustum;B[0].z=A;B[1].z=A;B[2].z=A;B[3].z=A;B[4].z=C;B[5].z=C;B[6].z=C;B[7].z=C;R[D]=H;D++}else R[D]=x,D++;u=0;for(K=R.length;uC;C++)F=B[C],F.copy(A[C]),F.unproject(E),F.applyMatrix4(D.matrixWorldInverse),F.xr.x&&(r.x=F.x),F.yr.y&&(r.y=F.y),F.zr.z&&(r.z=F.z);D.left=m.x;D.right=r.x;D.top=r.y;D.bottom=m.y;D.updateProjectionMatrix()}D=x.shadowMap;A=x.shadowMatrix;E=x.shadowCamera;E.position.setFromMatrixPosition(x.matrixWorld);t.setFromMatrixPosition(x.target.matrixWorld);E.lookAt(t);E.updateMatrixWorld();E.matrixWorldInverse.getInverse(E.matrixWorld);x.cameraHelper&& +(x.cameraHelper.visible=x.shadowCameraVisible);x.shadowCameraVisible&&x.cameraHelper.update();A.set(.5,0,0,.5,0,.5,0,.5,0,0,.5,.5,0,0,0,1);A.multiply(E.projectionMatrix);A.multiply(E.matrixWorldInverse);q.multiplyMatrices(E.projectionMatrix,E.matrixWorldInverse);p.setFromMatrix(q);a.setRenderTarget(D);a.clear();s.length=0;e(c,c,E);x=0;for(D=s.length;x 0 ) {\nfloat depth = gl_FragCoord.z / gl_FragCoord.w;\nfloat fogFactor = 0.0;\nif ( fogType == 1 ) {\nfogFactor = smoothstep( fogNear, fogFar, depth );\n} else {\nconst float LOG2 = 1.442695;\nfloat fogFactor = exp2( - fogDensity * fogDensity * depth * depth * LOG2 );\nfogFactor = 1.0 - clamp( fogFactor, 0.0, 1.0 );\n}\ngl_FragColor = mix( gl_FragColor, vec4( fogColor, gl_FragColor.w ), fogFactor );\n}\n}"].join("\n")); +w.compileShader(R);w.compileShader(H);w.attachShader(F,R);w.attachShader(F,H);w.linkProgram(F);D=F;v=w.getAttribLocation(D,"position");y=w.getAttribLocation(D,"uv");c=w.getUniformLocation(D,"uvOffset");d=w.getUniformLocation(D,"uvScale");e=w.getUniformLocation(D,"rotation");f=w.getUniformLocation(D,"scale");g=w.getUniformLocation(D,"color");h=w.getUniformLocation(D,"map");k=w.getUniformLocation(D,"opacity");n=w.getUniformLocation(D,"modelViewMatrix");p=w.getUniformLocation(D,"projectionMatrix");q= +w.getUniformLocation(D,"fogType");m=w.getUniformLocation(D,"fogDensity");r=w.getUniformLocation(D,"fogNear");t=w.getUniformLocation(D,"fogFar");s=w.getUniformLocation(D,"fogColor");u=w.getUniformLocation(D,"alphaTest");F=document.createElement("canvas");F.width=8;F.height=8;R=F.getContext("2d");R.fillStyle="white";R.fillRect(0,0,8,8);E=new THREE.Texture(F);E.needsUpdate=!0}w.useProgram(D);w.enableVertexAttribArray(v);w.enableVertexAttribArray(y);w.disable(w.CULL_FACE);w.enable(w.BLEND);w.bindBuffer(w.ARRAY_BUFFER, +K);w.vertexAttribPointer(v,2,w.FLOAT,!1,16,0);w.vertexAttribPointer(y,2,w.FLOAT,!1,16,8);w.bindBuffer(w.ELEMENT_ARRAY_BUFFER,x);w.uniformMatrix4fv(p,!1,B.projectionMatrix.elements);w.activeTexture(w.TEXTURE0);w.uniform1i(h,0);R=F=0;(H=A.fog)?(w.uniform3f(s,H.color.r,H.color.g,H.color.b),H instanceof THREE.Fog?(w.uniform1f(r,H.near),w.uniform1f(t,H.far),w.uniform1i(q,1),R=F=1):H instanceof THREE.FogExp2&&(w.uniform1f(m,H.density),w.uniform1i(q,2),R=F=2)):(w.uniform1i(q,0),R=F=0);for(var 
H=0,C=b.length;H< +C;H++){var T=b[H];T._modelViewMatrix.multiplyMatrices(B.matrixWorldInverse,T.matrixWorld);T.z=null===T.renderDepth?-T._modelViewMatrix.elements[14]:T.renderDepth}b.sort(G);for(var Q=[],H=0,C=b.length;Hq-1?0:q-1,r=q+1>e-1?e-1:q+1,t=0>p-1?0:p-1,s=p+1>d-1?d-1:p+1,u=[],v=[0,0,h[4*(q*d+p)]/255*b];u.push([-1,0,h[4*(q*d+t)]/255*b]);u.push([-1,-1,h[4*(m*d+t)]/255*b]);u.push([0,-1,h[4*(m*d+p)]/255*b]);u.push([1,-1,h[4*(m*d+s)]/255*b]);u.push([1,0,h[4*(q*d+s)]/255*b]);u.push([1,1,h[4*(r*d+s)]/255*b]);u.push([0,1,h[4*(r*d+p)]/255* +b]);u.push([-1,1,h[4*(r*d+t)]/255*b]);m=[];t=u.length;for(r=0;re)return null;var f=[],g=[],h=[],k,n,p;if(0=q--){console.log("Warning, unable to triangulate polygon!");break}k=n;e<=k&&(k=0);n=k+1;e<=n&&(n=0);p=n+1;e<=p&&(p=0);var m;a:{var r=m=void 0,t=void 0,s=void 0,u=void 0,v=void 0,y=void 0,G=void 0,w=void 0, +r=a[g[k]].x,t=a[g[k]].y,s=a[g[n]].x,u=a[g[n]].y,v=a[g[p]].x,y=a[g[p]].y;if(1E-10>(s-r)*(y-t)-(u-t)*(v-r))m=!1;else{var K=void 0,x=void 0,D=void 0,E=void 0,A=void 0,B=void 0,F=void 0,R=void 0,H=void 0,C=void 0,H=R=F=w=G=void 0,K=v-s,x=y-u,D=r-v,E=t-y,A=s-r,B=u-t;for(m=0;mk)g=d+1;else if(0b&&(b=0);1=b)return b=c[a]-b,a=this.curves[a],b=1-b/a.getLength(),a.getPointAt(b);a++}return null};THREE.CurvePath.prototype.getLength=function(){var a=this.getCurveLengths();return a[a.length-1]}; +THREE.CurvePath.prototype.getCurveLengths=function(){if(this.cacheLengths&&this.cacheLengths.length==this.curves.length)return this.cacheLengths;var a=[],b=0,c,d=this.curves.length;for(c=0;cb?b=h.x:h.xc?c=h.y:h.yd?d=h.z:h.zMath.abs(d.x-c[0].x)&&1E-10>Math.abs(d.y-c[0].y)&&c.splice(c.length-1,1);b&&c.push(c[0]);return c}; +THREE.Path.prototype.toShapes=function(a,b){function c(a){for(var b=[],c=0,d=a.length;cm&&(g=b[f],k=-k,h=b[e],m=-m),!(a.yh.y))if(a.y==g.y){if(a.x==g.x)return!0}else{e=m*(a.x-g.x)-k*(a.y-g.y);if(0==e)return!0;0>e||(d=!d)}}else if(a.y==g.y&&(h.x<=a.x&&a.x<=g.x||g.x<=a.x&&a.x<= +h.x))return!0}return d}var e=function(a){var b,c,d,e,f=[],g=new THREE.Path;b=0;for(c=a.length;bE||E>D)return[];k=n*p-k*q;if(0>k||k>D)return[]}else{if(0d?[]:k==d?f?[]:[g]:a<=d?[g,h]: +[g,n]}function e(a,b,c,d){var e=b.x-a.x,f=b.y-a.y;b=c.x-a.x;c=c.y-a.y;var g=d.x-a.x;d=d.y-a.y;a=e*c-f*b;e=e*d-f*g;return 1E-10f&&(f=d);var g=a+1;g>d&&(g=0);d=e(h[a],h[f],h[g],k[b]);if(!d)return!1; +d=k.length-1;f=b-1;0>f&&(f=d);g=b+1;g>d&&(g=0);return(d=e(k[b],k[f],k[g],h[a]))?!0:!1}function f(a,b){var c,e;for(c=0;cC){console.log("Infinite Loop! 
Holes left:"+ +n.length+", Probably Hole outside Shape!");break}for(q=B;qh;h++)n=k[h].x+":"+k[h].y, +n=p[n],void 0!==n&&(k[h]=n);return q.concat()},isClockWise:function(a){return 0>THREE.FontUtils.Triangulate.area(a)},b2p0:function(a,b){var c=1-a;return c*c*b},b2p1:function(a,b){return 2*(1-a)*a*b},b2p2:function(a,b){return a*a*b},b2:function(a,b,c,d){return this.b2p0(a,b)+this.b2p1(a,c)+this.b2p2(a,d)},b3p0:function(a,b){var c=1-a;return c*c*c*b},b3p1:function(a,b){var c=1-a;return 3*c*c*a*b},b3p2:function(a,b){return 3*(1-a)*a*a*b},b3p3:function(a,b){return a*a*a*b},b3:function(a,b,c,d,e){return this.b3p0(a, +b)+this.b3p1(a,c)+this.b3p2(a,d)+this.b3p3(a,e)}};THREE.LineCurve=function(a,b){this.v1=a;this.v2=b};THREE.LineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.LineCurve.prototype.getPoint=function(a){var b=this.v2.clone().sub(this.v1);b.multiplyScalar(a).add(this.v1);return b};THREE.LineCurve.prototype.getPointAt=function(a){return this.getPoint(a)};THREE.LineCurve.prototype.getTangent=function(a){return this.v2.clone().sub(this.v1).normalize()}; +THREE.QuadraticBezierCurve=function(a,b,c){this.v0=a;this.v1=b;this.v2=c};THREE.QuadraticBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.QuadraticBezierCurve.prototype.getPoint=function(a){var b=new THREE.Vector2;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);return b}; +THREE.QuadraticBezierCurve.prototype.getTangent=function(a){var b=new THREE.Vector2;b.x=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.y,this.v1.y,this.v2.y);return b.normalize()};THREE.CubicBezierCurve=function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d};THREE.CubicBezierCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.CubicBezierCurve.prototype.getPoint=function(a){var b;b=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);return new THREE.Vector2(b,a)};THREE.CubicBezierCurve.prototype.getTangent=function(a){var b;b=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b=new THREE.Vector2(b,a);b.normalize();return b}; +THREE.SplineCurve=function(a){this.points=void 0==a?[]:a};THREE.SplineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.SplineCurve.prototype.getPoint=function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector2;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);return c}; +THREE.EllipseCurve=function(a,b,c,d,e,f,g){this.aX=a;this.aY=b;this.xRadius=c;this.yRadius=d;this.aStartAngle=e;this.aEndAngle=f;this.aClockwise=g};THREE.EllipseCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.EllipseCurve.prototype.getPoint=function(a){var b=this.aEndAngle-this.aStartAngle;0>b&&(b+=2*Math.PI);b>2*Math.PI&&(b-=2*Math.PI);a=!0===this.aClockwise?this.aEndAngle+(1-a)*(2*Math.PI-b):this.aStartAngle+a*b;b=new THREE.Vector2;b.x=this.aX+this.xRadius*Math.cos(a);b.y=this.aY+this.yRadius*Math.sin(a);return b};THREE.ArcCurve=function(a,b,c,d,e,f){THREE.EllipseCurve.call(this,a,b,c,c,d,e,f)};THREE.ArcCurve.prototype=Object.create(THREE.EllipseCurve.prototype); 
+THREE.LineCurve3=THREE.Curve.create(function(a,b){this.v1=a;this.v2=b},function(a){var b=new THREE.Vector3;b.subVectors(this.v2,this.v1);b.multiplyScalar(a);b.add(this.v1);return b});THREE.QuadraticBezierCurve3=THREE.Curve.create(function(a,b,c){this.v0=a;this.v1=b;this.v2=c},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);b.z=THREE.Shape.Utils.b2(a,this.v0.z,this.v1.z,this.v2.z);return b}); +THREE.CubicBezierCurve3=THREE.Curve.create(function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);b.y=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b.z=THREE.Shape.Utils.b3(a,this.v0.z,this.v1.z,this.v2.z,this.v3.z);return b}); +THREE.SplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c}); +THREE.ClosedSplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-0;var c=Math.floor(a);a-=c;var c=c+(0a.hierarchy[b].keys[c].time&&(a.hierarchy[b].keys[c].time= +0),void 0!==a.hierarchy[b].keys[c].rot&&!(a.hierarchy[b].keys[c].rot instanceof THREE.Quaternion)){var d=a.hierarchy[b].keys[c].rot;a.hierarchy[b].keys[c].rot=(new THREE.Quaternion).fromArray(d)}if(a.hierarchy[b].keys.length&&void 0!==a.hierarchy[b].keys[0].morphTargets){d={};for(c=0;cd;d++){for(var e=this.keyTypes[d],f=this.data.hierarchy[a].keys[0],g=this.getNextKeyWith(e,a,1);g.timef.index;)f=g,g=this.getNextKeyWith(e,a,g.index+1);c.prevKey[e]=f;c.nextKey[e]=g}}}; +THREE.Animation.prototype.resetBlendWeights=function(){for(var a=0,b=this.hierarchy.length;aa.length-2?q:q+1;c[3]=q>a.length-3?q:q+2;q=a[c[0]];r=a[c[1]];t=a[c[2]];s=a[c[3]];c=e*e;m=e*c;d[0]=f(q[0],r[0],t[0],s[0],e,c,m);d[1]=f(q[1],r[1],t[1],s[1],e,c,m);d[2]=f(q[2],r[2],t[2],s[2],e,c,m);return d},f=function(a,b,c,d,e,f,m){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*m+ +(-3*(b-c)-2*a-d)*f+a*e+b};return function(f){if(!1!==this.isPlaying&&(this.currentTime+=f*this.timeScale,0!==this.weight)){f=this.data.length;if(this.currentTime>f||0>this.currentTime)if(this.loop)this.currentTime%=f,0>this.currentTime&&(this.currentTime+=f),this.reset();else{this.stop();return}f=0;for(var h=this.hierarchy.length;fq;q++){var m=this.keyTypes[q],r=n.prevKey[m],t=n.nextKey[m]; +if(0this.timeScale&&r.time>=this.currentTime){r=this.data.hierarchy[f].keys[0];for(t=this.getNextKeyWith(m,f,1);t.timer.index;)r=t,t=this.getNextKeyWith(m,f,t.index+1);n.prevKey[m]=r;n.nextKey[m]=t}k.matrixAutoUpdate=!0;k.matrixWorldNeedsUpdate=!0;var s=(this.currentTime-r.time)/(t.time-r.time),u=r[m],v=t[m];0>s&&(s=0);1a&&(this.currentTime%=a);this.currentTime=Math.min(this.currentTime,a);a=0;for(var b=this.hierarchy.length;af.index;)f=g,g=e[f.index+1];d.prevKey= 
+f;d.nextKey=g}g.time>=this.currentTime?f.interpolate(g,this.currentTime):f.interpolate(g,g.time);this.data.hierarchy[a].node.updateMatrix();c.matrixWorldNeedsUpdate=!0}}}};THREE.KeyFrameAnimation.prototype.getNextKeyWith=function(a,b,c){b=this.data.hierarchy[b].keys;for(c%=b.length;cthis.duration&&(this.currentTime%=this.duration);this.currentTime=Math.min(this.currentTime,this.duration);c=this.duration/this.frames;var d=Math.floor(this.currentTime/c);d!=b&&(this.mesh.morphTargetInfluences[a]=0,this.mesh.morphTargetInfluences[b]=1,this.mesh.morphTargetInfluences[d]= +0,a=b,b=d);this.mesh.morphTargetInfluences[d]=this.currentTime%c/c;this.mesh.morphTargetInfluences[a]=1-this.mesh.morphTargetInfluences[d]}}}()}; +THREE.BoxGeometry=function(a,b,c,d,e,f){function g(a,b,c,d,e,f,g,s){var u,v=h.widthSegments,y=h.heightSegments,G=e/2,w=f/2,K=h.vertices.length;if("x"===a&&"y"===b||"y"===a&&"x"===b)u="z";else if("x"===a&&"z"===b||"z"===a&&"x"===b)u="y",y=h.depthSegments;else if("z"===a&&"y"===b||"y"===a&&"z"===b)u="x",v=h.depthSegments;var x=v+1,D=y+1,E=e/v,A=f/y,B=new THREE.Vector3;B[u]=0=d)return new THREE.Vector2(c,a);d=Math.sqrt(d/2)}else a=!1,1E-10d?-1E-10>f&&(a=!0):Math.sign(e)== +Math.sign(g)&&(a=!0),a?(c=-e,a=d,d=Math.sqrt(h)):(c=d,a=e,d=Math.sqrt(h/2));return new THREE.Vector2(c/d,a/d)}function e(a,b){var c,d;for(P=a.length;0<=--P;){c=P;d=P-1;0>d&&(d=a.length-1);for(var e=0,f=r+2*p,e=0;eMath.abs(b.y-c.y)?[new THREE.Vector2(b.x,1-b.z),new THREE.Vector2(c.x,1-c.z),new THREE.Vector2(d.x,1-d.z),new THREE.Vector2(e.x,1-e.z)]:[new THREE.Vector2(b.y,1-b.z),new THREE.Vector2(c.y,1-c.z),new THREE.Vector2(d.y, +1-d.z),new THREE.Vector2(e.y,1-e.z)]}};THREE.ShapeGeometry=function(a,b){THREE.Geometry.call(this);this.type="ShapeGeometry";!1===a instanceof Array&&(a=[a]);this.addShapeList(a,b);this.computeFaceNormals()};THREE.ShapeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ShapeGeometry.prototype.addShapeList=function(a,b){for(var c=0,d=a.length;cc&&1===a.x&&(a=new THREE.Vector2(a.x-1,a.y));0===b.x&&0===b.z&&(a=new THREE.Vector2(c/2/Math.PI+.5, +a.y));return a.clone()}THREE.Geometry.call(this);this.type="PolyhedronGeometry";this.parameters={vertices:a,indices:b,radius:c,detail:d};c=c||1;d=d||0;for(var k=this,n=0,p=a.length;nr&&(.2>d&&(b[0].x+=1),.2>a&&(b[1].x+=1),.2>q&&(b[2].x+=1));n=0;for(p=this.vertices.length;nc.y?this.quaternion.set(1,0,0,0):(a.set(c.z,0,-c.x).normalize(),b=Math.acos(c.y),this.quaternion.setFromAxisAngle(a,b))}}(); +THREE.ArrowHelper.prototype.setLength=function(a,b,c){void 0===b&&(b=.2*a);void 0===c&&(c=.2*b);this.line.scale.set(1,a,1);this.line.updateMatrix();this.cone.scale.set(c,b,c);this.cone.position.y=a;this.cone.updateMatrix()};THREE.ArrowHelper.prototype.setColor=function(a){this.line.material.color.set(a);this.cone.material.color.set(a)}; +THREE.BoxHelper=function(a){var b=new THREE.BufferGeometry;b.addAttribute("position",new THREE.BufferAttribute(new Float32Array(72),3));THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:16776960}),THREE.LinePieces);void 0!==a&&this.update(a)};THREE.BoxHelper.prototype=Object.create(THREE.Line.prototype); +THREE.BoxHelper.prototype.update=function(a){var b=a.geometry;null===b.boundingBox&&b.computeBoundingBox();var 
c=b.boundingBox.min,b=b.boundingBox.max,d=this.geometry.attributes.position.array;d[0]=b.x;d[1]=b.y;d[2]=b.z;d[3]=c.x;d[4]=b.y;d[5]=b.z;d[6]=c.x;d[7]=b.y;d[8]=b.z;d[9]=c.x;d[10]=c.y;d[11]=b.z;d[12]=c.x;d[13]=c.y;d[14]=b.z;d[15]=b.x;d[16]=c.y;d[17]=b.z;d[18]=b.x;d[19]=c.y;d[20]=b.z;d[21]=b.x;d[22]=b.y;d[23]=b.z;d[24]=b.x;d[25]=b.y;d[26]=c.z;d[27]=c.x;d[28]=b.y;d[29]=c.z;d[30]=c.x;d[31]=b.y; +d[32]=c.z;d[33]=c.x;d[34]=c.y;d[35]=c.z;d[36]=c.x;d[37]=c.y;d[38]=c.z;d[39]=b.x;d[40]=c.y;d[41]=c.z;d[42]=b.x;d[43]=c.y;d[44]=c.z;d[45]=b.x;d[46]=b.y;d[47]=c.z;d[48]=b.x;d[49]=b.y;d[50]=b.z;d[51]=b.x;d[52]=b.y;d[53]=c.z;d[54]=c.x;d[55]=b.y;d[56]=b.z;d[57]=c.x;d[58]=b.y;d[59]=c.z;d[60]=c.x;d[61]=c.y;d[62]=b.z;d[63]=c.x;d[64]=c.y;d[65]=c.z;d[66]=b.x;d[67]=c.y;d[68]=b.z;d[69]=b.x;d[70]=c.y;d[71]=c.z;this.geometry.attributes.position.needsUpdate=!0;this.geometry.computeBoundingSphere();this.matrix=a.matrixWorld; +this.matrixAutoUpdate=!1};THREE.BoundingBoxHelper=function(a,b){var c=void 0!==b?b:8947848;this.object=a;this.box=new THREE.Box3;THREE.Mesh.call(this,new THREE.BoxGeometry(1,1,1),new THREE.MeshBasicMaterial({color:c,wireframe:!0}))};THREE.BoundingBoxHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.BoundingBoxHelper.prototype.update=function(){this.box.setFromObject(this.object);this.box.size(this.scale);this.box.center(this.position)}; +THREE.CameraHelper=function(a){function b(a,b,d){c(a,d);c(b,d)}function c(a,b){d.vertices.push(new THREE.Vector3);d.colors.push(new THREE.Color(b));void 0===f[a]&&(f[a]=[]);f[a].push(d.vertices.length-1)}var d=new THREE.Geometry,e=new THREE.LineBasicMaterial({color:16777215,vertexColors:THREE.FaceColors}),f={};b("n1","n2",16755200);b("n2","n4",16755200);b("n4","n3",16755200);b("n3","n1",16755200);b("f1","f2",16755200);b("f2","f4",16755200);b("f4","f3",16755200);b("f3","f1",16755200);b("n1","f1",16755200); +b("n2","f2",16755200);b("n3","f3",16755200);b("n4","f4",16755200);b("p","n1",16711680);b("p","n2",16711680);b("p","n3",16711680);b("p","n4",16711680);b("u1","u2",43775);b("u2","u3",43775);b("u3","u1",43775);b("c","t",16777215);b("p","c",3355443);b("cn1","cn2",3355443);b("cn3","cn4",3355443);b("cf1","cf2",3355443);b("cf3","cf4",3355443);THREE.Line.call(this,d,e,THREE.LinePieces);this.camera=a;this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.pointMap=f;this.update()}; +THREE.CameraHelper.prototype=Object.create(THREE.Line.prototype); +THREE.CameraHelper.prototype.update=function(){var a,b,c=new THREE.Vector3,d=new THREE.Camera,e=function(e,g,h,k){c.set(g,h,k).unproject(d);e=b[e];if(void 0!==e)for(g=0,h=e.length;gt;t++){d[0]=r[g[t]];d[1]=r[g[(t+1)%3]];d.sort(f);var s=d.toString();void 0===e[s]?(e[s]={vert1:d[0],vert2:d[1],face1:q,face2:void 0},p++):e[s].face2=q}d=new Float32Array(6*p);f=0;for(s in e)if(g=e[s],void 0===g.face2|| +.9999>k[g.face1].normal.dot(k[g.face2].normal))p=n[g.vert1],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z,p=n[g.vert2],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3));THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.EdgesHelper.prototype=Object.create(THREE.Line.prototype); +THREE.FaceNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;a=void 0!==c?c:16776960;d=void 0!==d?d:1;b=new THREE.Geometry;c=0;for(var e=this.object.geometry.faces.length;cb;b++)a.faces[b].color=this.colors[4>b?0:1];b=new THREE.MeshBasicMaterial({vertexColors:THREE.FaceColors,wireframe:!0});this.lightSphere=new 
THREE.Mesh(a,b);this.add(this.lightSphere); +this.update()};THREE.HemisphereLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.HemisphereLightHelper.prototype.dispose=function(){this.lightSphere.geometry.dispose();this.lightSphere.material.dispose()}; +THREE.HemisphereLightHelper.prototype.update=function(){var a=new THREE.Vector3;return function(){this.colors[0].copy(this.light.color).multiplyScalar(this.light.intensity);this.colors[1].copy(this.light.groundColor).multiplyScalar(this.light.intensity);this.lightSphere.lookAt(a.setFromMatrixPosition(this.light.matrixWorld).negate());this.lightSphere.geometry.colorsNeedUpdate=!0}}(); +THREE.PointLightHelper=function(a,b){this.light=a;this.light.updateMatrixWorld();var c=new THREE.SphereGeometry(b,4,2),d=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);THREE.Mesh.call(this,c,d);this.matrix=this.light.matrixWorld;this.matrixAutoUpdate=!1};THREE.PointLightHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.PointLightHelper.prototype.dispose=function(){this.geometry.dispose();this.material.dispose()}; +THREE.PointLightHelper.prototype.update=function(){this.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)}; +THREE.SkeletonHelper=function(a){this.bones=this.getBoneList(a);for(var b=new THREE.Geometry,c=0;cs;s++){d[0]=t[g[s]];d[1]=t[g[(s+1)%3]];d.sort(f);var u=d.toString();void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++)}d=new Float32Array(6*p);m=0;for(r=p;ms;s++)p= +k[q[2*m+s]],g=6*m+3*s,d[g+0]=p.x,d[g+1]=p.y,d[g+2]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3))}else if(a.geometry instanceof THREE.BufferGeometry){if(void 0!==a.geometry.attributes.index){k=a.geometry.attributes.position.array;r=a.geometry.attributes.index.array;n=a.geometry.drawcalls;p=0;0===n.length&&(n=[{count:r.length,index:0,start:0}]);for(var q=new Uint32Array(2*r.length),t=0,v=n.length;ts;s++)d[0]= +g+r[m+s],d[1]=g+r[m+(s+1)%3],d.sort(f),u=d.toString(),void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++);d=new Float32Array(6*p);m=0;for(r=p;ms;s++)g=6*m+3*s,p=3*q[2*m+s],d[g+0]=k[p],d[g+1]=k[p+1],d[g+2]=k[p+2]}else for(k=a.geometry.attributes.position.array,p=k.length/3,q=p/3,d=new Float32Array(6*p),m=0,r=q;ms;s++)g=18*m+6*s,q=9*m+3*s,d[g+0]=k[q],d[g+1]=k[q+1],d[g+2]=k[q+2],p=9*m+(s+1)%3*3,d[g+3]=k[p],d[g+4]=k[p+1],d[g+5]=k[p+2];h.addAttribute("position",new THREE.BufferAttribute(d, +3))}THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.WireframeHelper.prototype=Object.create(THREE.Line.prototype);THREE.ImmediateRenderObject=function(){THREE.Object3D.call(this);this.render=function(a){}};THREE.ImmediateRenderObject.prototype=Object.create(THREE.Object3D.prototype); +THREE.MorphBlendMesh=function(a,b){THREE.Mesh.call(this,a,b);this.animationsMap={};this.animationsList=[];var c=this.geometry.morphTargets.length;this.createAnimation("__default",0,c-1,c/1);this.setAnimationWeight("__default",1)};THREE.MorphBlendMesh.prototype=Object.create(THREE.Mesh.prototype); +THREE.MorphBlendMesh.prototype.createAnimation=function(a,b,c,d){b={startFrame:b,endFrame:c,length:c-b+1,fps:d,duration:(c-b)/d,lastFrame:0,currentFrame:0,active:!1,time:0,direction:1,weight:1,directionBackwards:!1,mirroredLoop:!1};this.animationsMap[a]=b;this.animationsList.push(b)}; +THREE.MorphBlendMesh.prototype.autoCreateAnimations=function(a){for(var 
b=/([a-z]+)_?(\d+)/,c,d={},e=this.geometry,f=0,g=e.morphTargets.length;fh.end&&(h.end=f);c||(c=k)}}for(k in d)h=d[k],this.createAnimation(k,h.start,h.end,a);this.firstAnimation=c}; +THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=this.animationsMap[a])a.direction=1,a.directionBackwards=!1};THREE.MorphBlendMesh.prototype.setAnimationDirectionBackward=function(a){if(a=this.animationsMap[a])a.direction=-1,a.directionBackwards=!0};THREE.MorphBlendMesh.prototype.setAnimationFPS=function(a,b){var c=this.animationsMap[a];c&&(c.fps=b,c.duration=(c.end-c.start)/c.fps)}; +THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b}; +THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):console.warn("animation["+a+"] undefined")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1}; +THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;bd.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g=d.weight; +f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; \ No newline at end of file diff --git a/plugins/Sidebar/media-globe/globe.js b/plugins/Sidebar/media-globe/globe.js new file mode 100644 index 00000000..eab71f9e --- /dev/null +++ b/plugins/Sidebar/media-globe/globe.js @@ -0,0 +1,435 @@ +/** + * dat.globe Javascript WebGL Globe Toolkit + * http://dataarts.github.com/dat.globe + * + * Copyright 2011 Data Arts Team, Google Creative Lab + * + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ + +var DAT = DAT || {}; + +DAT.Globe = function(container, opts) { + opts = opts || {}; + + var colorFn = opts.colorFn || function(x) { + var c = new THREE.Color(); + c.setHSL( ( 0.5 - (x * 2) ), Math.max(0.8, 1.0 - (x * 3)), 0.5 ); + return c; + }; + var imgDir = opts.imgDir || '/globe/'; + + var Shaders = { + 'earth' : { + uniforms: { + 'texture': { type: 't', value: null } + }, + vertexShader: [ + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + 'vNormal = normalize( normalMatrix * normal );', + 'vUv = uv;', + '}' + ].join('\n'), + fragmentShader: [ + 'uniform sampler2D texture;', + 'varying vec3 vNormal;', + 'varying vec2 vUv;', + 'void main() {', + 'vec3 diffuse = texture2D( texture, vUv ).xyz;', + 'float intensity = 1.05 - dot( vNormal, vec3( 0.0, 0.0, 1.0 ) );', + 'vec3 atmosphere = vec3( 1.0, 1.0, 1.0 ) * pow( intensity, 3.0 );', + 'gl_FragColor = vec4( diffuse + atmosphere, 1.0 );', + '}' + ].join('\n') + }, + 'atmosphere' : { + uniforms: {}, + vertexShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'vNormal = normalize( normalMatrix * normal );', + 'gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );', + '}' + ].join('\n'), + fragmentShader: [ + 'varying vec3 vNormal;', + 'void main() {', + 'float intensity = pow( 0.8 - dot( vNormal, vec3( 0, 0, 1.0 ) ), 12.0 );', + 'gl_FragColor = vec4( 1.0, 1.0, 1.0, 1.0 ) * intensity;', + '}' + ].join('\n') + } + }; + + var camera, scene, renderer, w, h; + var mesh, atmosphere, point, running; + + var overRenderer; + var running = true; + + var curZoomSpeed = 0; + var zoomSpeed = 50; + + var mouse = { x: 0, y: 0 }, mouseOnDown = { x: 0, y: 0 }; + var rotation = { x: 0, y: 0 }, + target = { x: Math.PI*3/2, y: Math.PI / 6.0 }, + targetOnDown = { x: 0, y: 0 }; + + var distance = 100000, distanceTarget = 100000; + var padding = 10; + var PI_HALF = Math.PI / 2; + + function init() { + + container.style.color = '#fff'; + container.style.font = '13px/20px Arial, sans-serif'; + + var shader, uniforms, material; + w = container.offsetWidth || window.innerWidth; + h = container.offsetHeight || window.innerHeight; + + camera = new THREE.PerspectiveCamera(30, w / h, 1, 10000); + camera.position.z = distance; + + scene = new THREE.Scene(); + + var geometry = new THREE.SphereGeometry(200, 40, 30); + + shader = Shaders['earth']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + uniforms['texture'].value = THREE.ImageUtils.loadTexture(imgDir+'world.jpg'); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.rotation.y = Math.PI; + scene.add(mesh); + + shader = Shaders['atmosphere']; + uniforms = THREE.UniformsUtils.clone(shader.uniforms); + + material = new THREE.ShaderMaterial({ + + uniforms: uniforms, + vertexShader: shader.vertexShader, + fragmentShader: shader.fragmentShader, + side: THREE.BackSide, + blending: THREE.AdditiveBlending, + transparent: true + + }); + + mesh = new THREE.Mesh(geometry, material); + mesh.scale.set( 1.1, 1.1, 1.1 ); + scene.add(mesh); + + geometry = new THREE.BoxGeometry(2.75, 2.75, 1); + geometry.applyMatrix(new THREE.Matrix4().makeTranslation(0,0,-0.5)); + + point = new THREE.Mesh(geometry); + + renderer = new THREE.WebGLRenderer({antialias: 
true}); + renderer.setSize(w, h); + renderer.setClearColor( 0x212121, 1 ); + + renderer.domElement.style.position = 'relative'; + + container.appendChild(renderer.domElement); + + container.addEventListener('mousedown', onMouseDown, false); + + if ('onwheel' in document) { + container.addEventListener('wheel', onMouseWheel, false); + } else { + container.addEventListener('mousewheel', onMouseWheel, false); + } + + document.addEventListener('keydown', onDocumentKeyDown, false); + + window.addEventListener('resize', onWindowResize, false); + + container.addEventListener('mouseover', function() { + overRenderer = true; + }, false); + + container.addEventListener('mouseout', function() { + overRenderer = false; + }, false); + } + + function addData(data, opts) { + var lat, lng, size, color, i, step, colorFnWrapper; + + opts.animated = opts.animated || false; + this.is_animated = opts.animated; + opts.format = opts.format || 'magnitude'; // other options are 'legend' and 'peer' + if (opts.format === 'magnitude') { + step = 3; + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else if (opts.format === 'legend') { + step = 4; + colorFnWrapper = function(data, i) { return colorFn(data[i+3]); } + } else if (opts.format === 'peer') { + step = 3; // assumed: 'peer' rows share the [lat, lng, value] layout of 'magnitude', since color reads data[i+2]; without this, i += step below is NaN + colorFnWrapper = function(data, i) { return colorFn(data[i+2]); } + } else { + throw('error: format not supported: '+opts.format); + } + + if (opts.animated) { + if (this._baseGeometry === undefined) { + this._baseGeometry = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; +// size = data[i + 2]; + color = colorFnWrapper(data,i); + size = 0; + addPoint(lat, lng, size, color, this._baseGeometry); + } + } + if(this._morphTargetId === undefined) { + this._morphTargetId = 0; + } else { + this._morphTargetId += 1; + } + opts.name = opts.name || 'morphTarget'+this._morphTargetId; + } + var subgeo = new THREE.Geometry(); + for (i = 0; i < data.length; i += step) { + lat = data[i]; + lng = data[i + 1]; + color = colorFnWrapper(data,i); + size = data[i + 2]; + size = size*200; + addPoint(lat, lng, size, color, subgeo); + } + if (opts.animated) { + this._baseGeometry.morphTargets.push({'name': opts.name, vertices: subgeo.vertices}); + } else { + this._baseGeometry = subgeo; + } + + }; + + function createPoints() { + if (this._baseGeometry !== undefined) { + if (this.is_animated === false) { + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: false + })); + } else { + if (this._baseGeometry.morphTargets.length < 8) { + console.log('t l',this._baseGeometry.morphTargets.length); + var padding = 8-this._baseGeometry.morphTargets.length; + console.log('padding', padding); + for(var i=0; i<=padding; i++) { + console.log('padding',i); + this._baseGeometry.morphTargets.push({'name': 'morphPadding'+i, vertices: this._baseGeometry.vertices}); + } + } + this.points = new THREE.Mesh(this._baseGeometry, new THREE.MeshBasicMaterial({ + color: 0xffffff, + vertexColors: THREE.FaceColors, + morphTargets: true + })); + } + scene.add(this.points); + } + } + + function addPoint(lat, lng, size, color, subgeo) { + + var phi = (90 - lat) * Math.PI / 180; + var theta = (180 - lng) * Math.PI / 180; + + point.position.x = 200 * Math.sin(phi) * Math.cos(theta); + point.position.y = 200 * Math.cos(phi); + point.position.z = 200 * Math.sin(phi) * Math.sin(theta); + + point.lookAt(mesh.position); + + point.scale.z = Math.max( size, 0.1 ); // avoid
non-invertible matrix + point.updateMatrix(); + + for (var i = 0; i < point.geometry.faces.length; i++) { + + point.geometry.faces[i].color = color; + + } + if(point.matrixAutoUpdate){ + point.updateMatrix(); + } + subgeo.merge(point.geometry, point.matrix); + } + + function onMouseDown(event) { + event.preventDefault(); + + container.addEventListener('mousemove', onMouseMove, false); + container.addEventListener('mouseup', onMouseUp, false); + container.addEventListener('mouseout', onMouseOut, false); + + mouseOnDown.x = - event.clientX; + mouseOnDown.y = event.clientY; + + targetOnDown.x = target.x; + targetOnDown.y = target.y; + + container.style.cursor = 'move'; + } + + function onMouseMove(event) { + mouse.x = - event.clientX; + mouse.y = event.clientY; + + var zoomDamp = distance/1000; + + target.x = targetOnDown.x + (mouse.x - mouseOnDown.x) * 0.005 * zoomDamp; + target.y = targetOnDown.y + (mouse.y - mouseOnDown.y) * 0.005 * zoomDamp; + + target.y = target.y > PI_HALF ? PI_HALF : target.y; + target.y = target.y < - PI_HALF ? - PI_HALF : target.y; + } + + function onMouseUp(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + container.style.cursor = 'auto'; + } + + function onMouseOut(event) { + container.removeEventListener('mousemove', onMouseMove, false); + container.removeEventListener('mouseup', onMouseUp, false); + container.removeEventListener('mouseout', onMouseOut, false); + } + + function onMouseWheel(event) { + event.preventDefault(); + if (overRenderer) { + if (event.deltaY) { + zoom(-event.deltaY * (event.deltaMode == 0 ? 1 : 50)); + } else { + zoom(event.wheelDeltaY * 0.3); + } + } + return false; + } + + function onDocumentKeyDown(event) { + switch (event.keyCode) { + case 38: + zoom(100); + event.preventDefault(); + break; + case 40: + zoom(-100); + event.preventDefault(); + break; + } + } + + function onWindowResize( event ) { + camera.aspect = container.offsetWidth / container.offsetHeight; + camera.updateProjectionMatrix(); + renderer.setSize( container.offsetWidth, container.offsetHeight ); + } + + function zoom(delta) { + distanceTarget -= delta; + distanceTarget = distanceTarget > 855 ? 855 : distanceTarget; + distanceTarget = distanceTarget < 350 ? 
350 : distanceTarget; + } + + function animate() { + if (!running) return; + requestAnimationFrame(animate); + render(); + } + + function render() { + zoom(curZoomSpeed); + + rotation.x += (target.x - rotation.x) * 0.1; + rotation.y += (target.y - rotation.y) * 0.1; + distance += (distanceTarget - distance) * 0.3; + + camera.position.x = distance * Math.sin(rotation.x) * Math.cos(rotation.y); + camera.position.y = distance * Math.sin(rotation.y); + camera.position.z = distance * Math.cos(rotation.x) * Math.cos(rotation.y); + + camera.lookAt(mesh.position); + + renderer.render(scene, camera); + } + + function unload() { + running = false; + container.removeEventListener('mousedown', onMouseDown, false); + if ('onwheel' in document) { + container.removeEventListener('wheel', onMouseWheel, false); + } else { + container.removeEventListener('mousewheel', onMouseWheel, false); + } + document.removeEventListener('keydown', onDocumentKeyDown, false); + window.removeEventListener('resize', onWindowResize, false); + + } + + init(); + this.animate = animate; + this.unload = unload; + + + this.__defineGetter__('time', function() { + return this._time || 0; + }); + + this.__defineSetter__('time', function(t) { + var validMorphs = []; + var morphDict = this.points.morphTargetDictionary; + for(var k in morphDict) { + if(k.indexOf('morphPadding') < 0) { + validMorphs.push(morphDict[k]); + } + } + validMorphs.sort(); + var l = validMorphs.length-1; + var scaledt = t*l+1; + var index = Math.floor(scaledt); + // zero every influence, then crossfade between the two morph frames around scaledt + for (i=0;i<validMorphs.length;i++) { + this.points.morphTargetInfluences[validMorphs[i]] = 0; + } + var lastIndex = index - 1; + var leftover = scaledt - index; + if (lastIndex >= 0) { + this.points.morphTargetInfluences[lastIndex] = 1 - leftover; + } + this.points.morphTargetInfluences[index] = leftover; + this._time = t; + }); + + this.addData = addData; + this.createPoints = createPoints; + this.renderer = renderer; + this.scene = scene; + + return this; + +}; diff --git a/plugins/Sidebar/media-globe/three.min.js b/plugins/Sidebar/media-globe/three.min.js new file mode 100644 index 00000000..a88b4afa --- /dev/null +++ b/plugins/Sidebar/media-globe/three.min.js @@ -0,0 +1,814 @@ +// threejs.org/license
+'use strict';var THREE={REVISION:"69"};"object"===typeof module&&(module.exports=THREE);void 0===Math.sign&&(Math.sign=function(a){return 0>a?-1:0>16&255)/255;this.g=(a>>8&255)/255;this.b=(a&255)/255;return this},setRGB:function(a,b,c){this.r=a;this.g=b;this.b=c;return this},setHSL:function(a,b,c){if(0===b)this.r=this.g=this.b=c;else{var d=function(a,b,c){0>c&&(c+=1);1c?b:c<2/3?a+6*(b-a)*(2/3-c):a};b=.5>=c?c*(1+b):c+b-c*b;c=2*c-b;this.r=d(c,b,a+1/3);this.g=d(c,b,a);this.b=d(c,b,a-1/3)}return this},setStyle:function(a){if(/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.test(a))return a=/^rgb\((\d+), ?(\d+), ?(\d+)\)$/i.exec(a),this.r=Math.min(255,parseInt(a[1],10))/255,this.g=Math.min(255,parseInt(a[2],10))/255,this.b=Math.min(255,parseInt(a[3],10))/255,this;if(/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.test(a))return a=/^rgb\((\d+)\%, ?(\d+)\%, ?(\d+)\%\)$/i.exec(a),this.r= +Math.min(100,parseInt(a[1],10))/100,this.g=Math.min(100,parseInt(a[2],10))/100,this.b=Math.min(100,parseInt(a[3],10))/100,this;if(/^\#([0-9a-f]{6})$/i.test(a))return a=/^\#([0-9a-f]{6})$/i.exec(a),this.setHex(parseInt(a[1],16)),this;if(/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.test(a))return a=/^\#([0-9a-f])([0-9a-f])([0-9a-f])$/i.exec(a),this.setHex(parseInt(a[1]+a[1]+a[2]+a[2]+a[3]+a[3],16)),this;if(/^(\w+)$/i.test(a))return this.setHex(THREE.ColorKeywords[a]),this},copy:function(a){this.r=a.r;this.g= +a.g;this.b=a.b;return this},copyGammaToLinear:function(a){this.r=a.r*a.r;this.g=a.g*a.g;this.b=a.b*a.b;return
this},copyLinearToGamma:function(a){this.r=Math.sqrt(a.r);this.g=Math.sqrt(a.g);this.b=Math.sqrt(a.b);return this},convertGammaToLinear:function(){var a=this.r,b=this.g,c=this.b;this.r=a*a;this.g=b*b;this.b=c*c;return this},convertLinearToGamma:function(){this.r=Math.sqrt(this.r);this.g=Math.sqrt(this.g);this.b=Math.sqrt(this.b);return this},getHex:function(){return 255*this.r<<16^255*this.g<< +8^255*this.b<<0},getHexString:function(){return("000000"+this.getHex().toString(16)).slice(-6)},getHSL:function(a){a=a||{h:0,s:0,l:0};var b=this.r,c=this.g,d=this.b,e=Math.max(b,c,d),f=Math.min(b,c,d),g,h=(f+e)/2;if(f===e)f=g=0;else{var k=e-f,f=.5>=h?k/(e+f):k/(2-e-f);switch(e){case b:g=(c-d)/k+(cf&&c>b?(c=2*Math.sqrt(1+c-f-b),this._w=(k-g)/c,this._x=.25*c,this._y=(a+e)/c,this._z=(d+h)/c):f>b?(c=2*Math.sqrt(1+f-c-b),this._w=(d-h)/c,this._x=(a+e)/c,this._y= +.25*c,this._z=(g+k)/c):(c=2*Math.sqrt(1+b-c-f),this._w=(e-a)/c,this._x=(d+h)/c,this._y=(g+k)/c,this._z=.25*c);this.onChangeCallback();return this},setFromUnitVectors:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3);b=c.dot(d)+1;1E-6>b?(b=0,Math.abs(c.x)>Math.abs(c.z)?a.set(-c.y,c.x,0):a.set(0,-c.z,c.y)):a.crossVectors(c,d);this._x=a.x;this._y=a.y;this._z=a.z;this._w=b;this.normalize();return this}}(),inverse:function(){this.conjugate().normalize();return this},conjugate:function(){this._x*= +-1;this._y*=-1;this._z*=-1;this.onChangeCallback();return this},dot:function(a){return this._x*a._x+this._y*a._y+this._z*a._z+this._w*a._w},lengthSq:function(){return this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w},length:function(){return Math.sqrt(this._x*this._x+this._y*this._y+this._z*this._z+this._w*this._w)},normalize:function(){var a=this.length();0===a?(this._z=this._y=this._x=0,this._w=1):(a=1/a,this._x*=a,this._y*=a,this._z*=a,this._w*=a);this.onChangeCallback();return this}, +multiply:function(a,b){return void 0!==b?(console.warn("THREE.Quaternion: .multiply() now only accepts one argument. Use .multiplyQuaternions( a, b ) instead."),this.multiplyQuaternions(a,b)):this.multiplyQuaternions(this,a)},multiplyQuaternions:function(a,b){var c=a._x,d=a._y,e=a._z,f=a._w,g=b._x,h=b._y,k=b._z,n=b._w;this._x=c*n+f*g+d*k-e*h;this._y=d*n+f*h+e*g-c*k;this._z=e*n+f*k+c*h-d*g;this._w=f*n-c*g-d*h-e*k;this.onChangeCallback();return this},multiplyVector3:function(a){console.warn("THREE.Quaternion: .multiplyVector3() has been removed. 
Use is now vector.applyQuaternion( quaternion ) instead."); +return a.applyQuaternion(this)},slerp:function(a,b){if(0===b)return this;if(1===b)return this.copy(a);var c=this._x,d=this._y,e=this._z,f=this._w,g=f*a._w+c*a._x+d*a._y+e*a._z;0>g?(this._w=-a._w,this._x=-a._x,this._y=-a._y,this._z=-a._z,g=-g):this.copy(a);if(1<=g)return this._w=f,this._x=c,this._y=d,this._z=e,this;var h=Math.acos(g),k=Math.sqrt(1-g*g);if(.001>Math.abs(k))return this._w=.5*(f+this._w),this._x=.5*(c+this._x),this._y=.5*(d+this._y),this._z=.5*(e+this._z),this;g=Math.sin((1-b)*h)/k;h= +Math.sin(b*h)/k;this._w=f*g+this._w*h;this._x=c*g+this._x*h;this._y=d*g+this._y*h;this._z=e*g+this._z*h;this.onChangeCallback();return this},equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._w===this._w},fromArray:function(a,b){void 0===b&&(b=0);this._x=a[b];this._y=a[b+1];this._z=a[b+2];this._w=a[b+3];this.onChangeCallback();return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this._x;a[b+1]=this._y;a[b+2]=this._z;a[b+3]=this._w;return a},onChange:function(a){this.onChangeCallback= +a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Quaternion(this._x,this._y,this._z,this._w)}};THREE.Quaternion.slerp=function(a,b,c,d){return c.copy(a).slerp(b,d)};THREE.Vector2=function(a,b){this.x=a||0;this.y=b||0}; +THREE.Vector2.prototype={constructor:THREE.Vector2,set:function(a,b){this.x=a;this.y=b;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;return this},add:function(a, +b){if(void 0!==b)return console.warn("THREE.Vector2: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;return this},addScalar:function(a){this.x+=a;this.y+=a;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector2: .sub() now only accepts one argument. 
Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;return this}, +subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;return this},multiply:function(a){this.x*=a.x;this.y*=a.y;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;return this},divide:function(a){this.x/=a.x;this.y/=a.y;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a):this.y=this.x=0;return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector2,b=new THREE.Vector2);a.set(c,c);b.set(d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);return this}, +roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);return this},negate:function(){this.x=-this.x;this.y=-this.y;return this},dot:function(a){return this.x*a.x+this.y*a.y},lengthSq:function(){return this.x*this.x+this.y*this.y},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y)},normalize:function(){return this.divideScalar(this.length())},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b= +this.x-a.x;a=this.y-a.y;return b*b+a*a},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;return a},clone:function(){return new THREE.Vector2(this.x,this.y)}}; +THREE.Vector3=function(a,b,c){this.x=a||0;this.y=b||0;this.z=c||0}; +THREE.Vector3.prototype={constructor:THREE.Vector3,set:function(a,b,c){this.x=a;this.y=b;this.z=c;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;default:throw Error("index is out of range: "+a);}},getComponent:function(a){switch(a){case 0:return this.x;case 1:return this.y;case 2:return this.z;default:throw Error("index is out of range: "+ +a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;return this},addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."), +this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;return this},multiply:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .multiply() now only accepts one argument. 
Use .multiplyVectors( a, b ) instead."),this.multiplyVectors(a,b);this.x*=a.x;this.y*=a.y;this.z*=a.z;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;return this},multiplyVectors:function(a,b){this.x=a.x*b.x;this.y= +a.y*b.y;this.z=a.z*b.z;return this},applyEuler:function(){var a;return function(b){!1===b instanceof THREE.Euler&&console.error("THREE.Vector3: .applyEuler() now expects a Euler rotation rather than a Vector3 and order.");void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromEuler(b));return this}}(),applyAxisAngle:function(){var a;return function(b,c){void 0===a&&(a=new THREE.Quaternion);this.applyQuaternion(a.setFromAxisAngle(b,c));return this}}(),applyMatrix3:function(a){var b=this.x, +c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[3]*c+a[6]*d;this.y=a[1]*b+a[4]*c+a[7]*d;this.z=a[2]*b+a[5]*c+a[8]*d;return this},applyMatrix4:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12];this.y=a[1]*b+a[5]*c+a[9]*d+a[13];this.z=a[2]*b+a[6]*c+a[10]*d+a[14];return this},applyProjection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;var e=1/(a[3]*b+a[7]*c+a[11]*d+a[15]);this.x=(a[0]*b+a[4]*c+a[8]*d+a[12])*e;this.y=(a[1]*b+a[5]*c+a[9]*d+a[13])*e;this.z= +(a[2]*b+a[6]*c+a[10]*d+a[14])*e;return this},applyQuaternion:function(a){var b=this.x,c=this.y,d=this.z,e=a.x,f=a.y,g=a.z;a=a.w;var h=a*b+f*d-g*c,k=a*c+g*b-e*d,n=a*d+e*c-f*b,b=-e*b-f*c-g*d;this.x=h*a+b*-e+k*-g-n*-f;this.y=k*a+b*-f+n*-e-h*-g;this.z=n*a+b*-g+h*-f-k*-e;return this},project:function(){var a;return function(b){void 0===a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.projectionMatrix,a.getInverse(b.matrixWorld));return this.applyProjection(a)}}(),unproject:function(){var a;return function(b){void 0=== +a&&(a=new THREE.Matrix4);a.multiplyMatrices(b.matrixWorld,a.getInverse(b.projectionMatrix));return this.applyProjection(a)}}(),transformDirection:function(a){var b=this.x,c=this.y,d=this.z;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d;this.y=a[1]*b+a[5]*c+a[9]*d;this.z=a[2]*b+a[6]*c+a[10]*d;this.normalize();return this},divide:function(a){this.x/=a.x;this.y/=a.y;this.z/=a.z;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a):this.z=this.y=this.x=0;return this},min:function(a){this.x> +a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector3,b=new THREE.Vector3);a.set(c,c,c);b.set(d,d,d);return this.clamp(a, +b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;return this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)},normalize:function(){return 
this.divideScalar(this.length())},setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/ +b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;return this},cross:function(a,b){if(void 0!==b)return console.warn("THREE.Vector3: .cross() now only accepts one argument. Use .crossVectors( a, b ) instead."),this.crossVectors(a,b);var c=this.x,d=this.y,e=this.z;this.x=d*a.z-e*a.y;this.y=e*a.x-c*a.z;this.z=c*a.y-d*a.x;return this},crossVectors:function(a,b){var c=a.x,d=a.y,e=a.z,f=b.x,g=b.y,h=b.z;this.x=d*h-e*g;this.y=e*f-c*h;this.z=c*g-d*f;return this}, +projectOnVector:function(){var a,b;return function(c){void 0===a&&(a=new THREE.Vector3);a.copy(c).normalize();b=this.dot(a);return this.copy(a).multiplyScalar(b)}}(),projectOnPlane:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);a.copy(this).projectOnVector(b);return this.sub(a)}}(),reflect:function(){var a;return function(b){void 0===a&&(a=new THREE.Vector3);return this.sub(a.copy(b).multiplyScalar(2*this.dot(b)))}}(),angleTo:function(a){a=this.dot(a)/(this.length()*a.length()); +return Math.acos(THREE.Math.clamp(a,-1,1))},distanceTo:function(a){return Math.sqrt(this.distanceToSquared(a))},distanceToSquared:function(a){var b=this.x-a.x,c=this.y-a.y;a=this.z-a.z;return b*b+c*c+a*a},setEulerFromRotationMatrix:function(a,b){console.error("THREE.Vector3: .setEulerFromRotationMatrix() has been removed. Use Euler.setFromRotationMatrix() instead.")},setEulerFromQuaternion:function(a,b){console.error("THREE.Vector3: .setEulerFromQuaternion() has been removed. Use Euler.setFromQuaternion() instead.")}, +getPositionFromMatrix:function(a){console.warn("THREE.Vector3: .getPositionFromMatrix() has been renamed to .setFromMatrixPosition().");return this.setFromMatrixPosition(a)},getScaleFromMatrix:function(a){console.warn("THREE.Vector3: .getScaleFromMatrix() has been renamed to .setFromMatrixScale().");return this.setFromMatrixScale(a)},getColumnFromMatrix:function(a,b){console.warn("THREE.Vector3: .getColumnFromMatrix() has been renamed to .setFromMatrixColumn().");return this.setFromMatrixColumn(a, +b)},setFromMatrixPosition:function(a){this.x=a.elements[12];this.y=a.elements[13];this.z=a.elements[14];return this},setFromMatrixScale:function(a){var b=this.set(a.elements[0],a.elements[1],a.elements[2]).length(),c=this.set(a.elements[4],a.elements[5],a.elements[6]).length();a=this.set(a.elements[8],a.elements[9],a.elements[10]).length();this.x=b;this.y=c;this.z=a;return this},setFromMatrixColumn:function(a,b){var c=4*a,d=b.elements;this.x=d[c];this.y=d[c+1];this.z=d[c+2];return this},equals:function(a){return a.x=== +this.x&&a.y===this.y&&a.z===this.z},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]=this.z;return a},clone:function(){return new THREE.Vector3(this.x,this.y,this.z)}};THREE.Vector4=function(a,b,c,d){this.x=a||0;this.y=b||0;this.z=c||0;this.w=void 0!==d?d:1}; +THREE.Vector4.prototype={constructor:THREE.Vector4,set:function(a,b,c,d){this.x=a;this.y=b;this.z=c;this.w=d;return this},setX:function(a){this.x=a;return this},setY:function(a){this.y=a;return this},setZ:function(a){this.z=a;return this},setW:function(a){this.w=a;return this},setComponent:function(a,b){switch(a){case 0:this.x=b;break;case 1:this.y=b;break;case 2:this.z=b;break;case 3:this.w=b;break;default:throw Error("index is out of range: 
"+a);}},getComponent:function(a){switch(a){case 0:return this.x; +case 1:return this.y;case 2:return this.z;case 3:return this.w;default:throw Error("index is out of range: "+a);}},copy:function(a){this.x=a.x;this.y=a.y;this.z=a.z;this.w=void 0!==a.w?a.w:1;return this},add:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .add() now only accepts one argument. Use .addVectors( a, b ) instead."),this.addVectors(a,b);this.x+=a.x;this.y+=a.y;this.z+=a.z;this.w+=a.w;return this},addScalar:function(a){this.x+=a;this.y+=a;this.z+=a;this.w+=a;return this}, +addVectors:function(a,b){this.x=a.x+b.x;this.y=a.y+b.y;this.z=a.z+b.z;this.w=a.w+b.w;return this},sub:function(a,b){if(void 0!==b)return console.warn("THREE.Vector4: .sub() now only accepts one argument. Use .subVectors( a, b ) instead."),this.subVectors(a,b);this.x-=a.x;this.y-=a.y;this.z-=a.z;this.w-=a.w;return this},subVectors:function(a,b){this.x=a.x-b.x;this.y=a.y-b.y;this.z=a.z-b.z;this.w=a.w-b.w;return this},multiplyScalar:function(a){this.x*=a;this.y*=a;this.z*=a;this.w*=a;return this},applyMatrix4:function(a){var b= +this.x,c=this.y,d=this.z,e=this.w;a=a.elements;this.x=a[0]*b+a[4]*c+a[8]*d+a[12]*e;this.y=a[1]*b+a[5]*c+a[9]*d+a[13]*e;this.z=a[2]*b+a[6]*c+a[10]*d+a[14]*e;this.w=a[3]*b+a[7]*c+a[11]*d+a[15]*e;return this},divideScalar:function(a){0!==a?(a=1/a,this.x*=a,this.y*=a,this.z*=a,this.w*=a):(this.z=this.y=this.x=0,this.w=1);return this},setAxisAngleFromQuaternion:function(a){this.w=2*Math.acos(a.w);var b=Math.sqrt(1-a.w*a.w);1E-4>b?(this.x=1,this.z=this.y=0):(this.x=a.x/b,this.y=a.y/b,this.z=a.z/b);return this}, +setAxisAngleFromRotationMatrix:function(a){var b,c,d;a=a.elements;var e=a[0];d=a[4];var f=a[8],g=a[1],h=a[5],k=a[9];c=a[2];b=a[6];var n=a[10];if(.01>Math.abs(d-g)&&.01>Math.abs(f-c)&&.01>Math.abs(k-b)){if(.1>Math.abs(d+g)&&.1>Math.abs(f+c)&&.1>Math.abs(k+b)&&.1>Math.abs(e+h+n-3))return this.set(1,0,0,0),this;a=Math.PI;e=(e+1)/2;h=(h+1)/2;n=(n+1)/2;d=(d+g)/4;f=(f+c)/4;k=(k+b)/4;e>h&&e>n?.01>e?(b=0,d=c=.707106781):(b=Math.sqrt(e),c=d/b,d=f/b):h>n?.01>h?(b=.707106781,c=0,d=.707106781):(c=Math.sqrt(h), +b=d/c,d=k/c):.01>n?(c=b=.707106781,d=0):(d=Math.sqrt(n),b=f/d,c=k/d);this.set(b,c,d,a);return this}a=Math.sqrt((b-k)*(b-k)+(f-c)*(f-c)+(g-d)*(g-d));.001>Math.abs(a)&&(a=1);this.x=(b-k)/a;this.y=(f-c)/a;this.z=(g-d)/a;this.w=Math.acos((e+h+n-1)/2);return this},min:function(a){this.x>a.x&&(this.x=a.x);this.y>a.y&&(this.y=a.y);this.z>a.z&&(this.z=a.z);this.w>a.w&&(this.w=a.w);return this},max:function(a){this.xb.x&&(this.x=b.x);this.yb.y&&(this.y=b.y);this.zb.z&&(this.z=b.z);this.wb.w&&(this.w=b.w);return this},clampScalar:function(){var a,b;return function(c,d){void 0===a&&(a=new THREE.Vector4,b=new THREE.Vector4);a.set(c,c,c,c);b.set(d,d,d,d);return this.clamp(a,b)}}(),floor:function(){this.x=Math.floor(this.x);this.y=Math.floor(this.y);this.z=Math.floor(this.z);this.w=Math.floor(this.w); +return this},ceil:function(){this.x=Math.ceil(this.x);this.y=Math.ceil(this.y);this.z=Math.ceil(this.z);this.w=Math.ceil(this.w);return this},round:function(){this.x=Math.round(this.x);this.y=Math.round(this.y);this.z=Math.round(this.z);this.w=Math.round(this.w);return this},roundToZero:function(){this.x=0>this.x?Math.ceil(this.x):Math.floor(this.x);this.y=0>this.y?Math.ceil(this.y):Math.floor(this.y);this.z=0>this.z?Math.ceil(this.z):Math.floor(this.z);this.w=0>this.w?Math.ceil(this.w):Math.floor(this.w); +return this},negate:function(){this.x=-this.x;this.y=-this.y;this.z=-this.z;this.w=-this.w;return 
this},dot:function(a){return this.x*a.x+this.y*a.y+this.z*a.z+this.w*a.w},lengthSq:function(){return this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w},length:function(){return Math.sqrt(this.x*this.x+this.y*this.y+this.z*this.z+this.w*this.w)},lengthManhattan:function(){return Math.abs(this.x)+Math.abs(this.y)+Math.abs(this.z)+Math.abs(this.w)},normalize:function(){return this.divideScalar(this.length())}, +setLength:function(a){var b=this.length();0!==b&&a!==b&&this.multiplyScalar(a/b);return this},lerp:function(a,b){this.x+=(a.x-this.x)*b;this.y+=(a.y-this.y)*b;this.z+=(a.z-this.z)*b;this.w+=(a.w-this.w)*b;return this},equals:function(a){return a.x===this.x&&a.y===this.y&&a.z===this.z&&a.w===this.w},fromArray:function(a,b){void 0===b&&(b=0);this.x=a[b];this.y=a[b+1];this.z=a[b+2];this.w=a[b+3];return this},toArray:function(a,b){void 0===a&&(a=[]);void 0===b&&(b=0);a[b]=this.x;a[b+1]=this.y;a[b+2]= +this.z;a[b+3]=this.w;return a},clone:function(){return new THREE.Vector4(this.x,this.y,this.z,this.w)}};THREE.Euler=function(a,b,c,d){this._x=a||0;this._y=b||0;this._z=c||0;this._order=d||THREE.Euler.DefaultOrder};THREE.Euler.RotationOrders="XYZ YZX ZXY XZY YXZ ZYX".split(" ");THREE.Euler.DefaultOrder="XYZ"; +THREE.Euler.prototype={constructor:THREE.Euler,_x:0,_y:0,_z:0,_order:THREE.Euler.DefaultOrder,get x(){return this._x},set x(a){this._x=a;this.onChangeCallback()},get y(){return this._y},set y(a){this._y=a;this.onChangeCallback()},get z(){return this._z},set z(a){this._z=a;this.onChangeCallback()},get order(){return this._order},set order(a){this._order=a;this.onChangeCallback()},set:function(a,b,c,d){this._x=a;this._y=b;this._z=c;this._order=d||this._order;this.onChangeCallback();return this},copy:function(a){this._x= +a._x;this._y=a._y;this._z=a._z;this._order=a._order;this.onChangeCallback();return this},setFromRotationMatrix:function(a,b){var c=THREE.Math.clamp,d=a.elements,e=d[0],f=d[4],g=d[8],h=d[1],k=d[5],n=d[9],p=d[2],q=d[6],d=d[10];b=b||this._order;"XYZ"===b?(this._y=Math.asin(c(g,-1,1)),.99999>Math.abs(g)?(this._x=Math.atan2(-n,d),this._z=Math.atan2(-f,e)):(this._x=Math.atan2(q,k),this._z=0)):"YXZ"===b?(this._x=Math.asin(-c(n,-1,1)),.99999>Math.abs(n)?(this._y=Math.atan2(g,d),this._z=Math.atan2(h,k)):(this._y= +Math.atan2(-p,e),this._z=0)):"ZXY"===b?(this._x=Math.asin(c(q,-1,1)),.99999>Math.abs(q)?(this._y=Math.atan2(-p,d),this._z=Math.atan2(-f,k)):(this._y=0,this._z=Math.atan2(h,e))):"ZYX"===b?(this._y=Math.asin(-c(p,-1,1)),.99999>Math.abs(p)?(this._x=Math.atan2(q,d),this._z=Math.atan2(h,e)):(this._x=0,this._z=Math.atan2(-f,k))):"YZX"===b?(this._z=Math.asin(c(h,-1,1)),.99999>Math.abs(h)?(this._x=Math.atan2(-n,k),this._y=Math.atan2(-p,e)):(this._x=0,this._y=Math.atan2(g,d))):"XZY"===b?(this._z=Math.asin(-c(f, +-1,1)),.99999>Math.abs(f)?(this._x=Math.atan2(q,k),this._y=Math.atan2(g,e)):(this._x=Math.atan2(-n,d),this._y=0)):console.warn("THREE.Euler: .setFromRotationMatrix() given unsupported order: "+b);this._order=b;this.onChangeCallback();return this},setFromQuaternion:function(a,b,c){var d=THREE.Math.clamp,e=a.x*a.x,f=a.y*a.y,g=a.z*a.z,h=a.w*a.w;b=b||this._order;"XYZ"===b?(this._x=Math.atan2(2*(a.x*a.w-a.y*a.z),h-e-f+g),this._y=Math.asin(d(2*(a.x*a.z+a.y*a.w),-1,1)),this._z=Math.atan2(2*(a.z*a.w-a.x* 
+a.y),h+e-f-g)):"YXZ"===b?(this._x=Math.asin(d(2*(a.x*a.w-a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h-e-f+g),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h-e+f-g)):"ZXY"===b?(this._x=Math.asin(d(2*(a.x*a.w+a.y*a.z),-1,1)),this._y=Math.atan2(2*(a.y*a.w-a.z*a.x),h-e-f+g),this._z=Math.atan2(2*(a.z*a.w-a.x*a.y),h-e+f-g)):"ZYX"===b?(this._x=Math.atan2(2*(a.x*a.w+a.z*a.y),h-e-f+g),this._y=Math.asin(d(2*(a.y*a.w-a.x*a.z),-1,1)),this._z=Math.atan2(2*(a.x*a.y+a.z*a.w),h+e-f-g)):"YZX"===b?(this._x=Math.atan2(2* +(a.x*a.w-a.z*a.y),h-e+f-g),this._y=Math.atan2(2*(a.y*a.w-a.x*a.z),h+e-f-g),this._z=Math.asin(d(2*(a.x*a.y+a.z*a.w),-1,1))):"XZY"===b?(this._x=Math.atan2(2*(a.x*a.w+a.y*a.z),h-e+f-g),this._y=Math.atan2(2*(a.x*a.z+a.y*a.w),h+e-f-g),this._z=Math.asin(d(2*(a.z*a.w-a.x*a.y),-1,1))):console.warn("THREE.Euler: .setFromQuaternion() given unsupported order: "+b);this._order=b;if(!1!==c)this.onChangeCallback();return this},reorder:function(){var a=new THREE.Quaternion;return function(b){a.setFromEuler(this); +this.setFromQuaternion(a,b)}}(),equals:function(a){return a._x===this._x&&a._y===this._y&&a._z===this._z&&a._order===this._order},fromArray:function(a){this._x=a[0];this._y=a[1];this._z=a[2];void 0!==a[3]&&(this._order=a[3]);this.onChangeCallback();return this},toArray:function(){return[this._x,this._y,this._z,this._order]},onChange:function(a){this.onChangeCallback=a;return this},onChangeCallback:function(){},clone:function(){return new THREE.Euler(this._x,this._y,this._z,this._order)}}; +THREE.Line3=function(a,b){this.start=void 0!==a?a:new THREE.Vector3;this.end=void 0!==b?b:new THREE.Vector3}; +THREE.Line3.prototype={constructor:THREE.Line3,set:function(a,b){this.start.copy(a);this.end.copy(b);return this},copy:function(a){this.start.copy(a.start);this.end.copy(a.end);return this},center:function(a){return(a||new THREE.Vector3).addVectors(this.start,this.end).multiplyScalar(.5)},delta:function(a){return(a||new THREE.Vector3).subVectors(this.end,this.start)},distanceSq:function(){return this.start.distanceToSquared(this.end)},distance:function(){return this.start.distanceTo(this.end)},at:function(a, +b){var c=b||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},closestPointToPointParameter:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d){a.subVectors(c,this.start);b.subVectors(this.end,this.start);var e=b.dot(b),e=b.dot(a)/e;d&&(e=THREE.Math.clamp(e,0,1));return e}}(),closestPointToPoint:function(a,b,c){a=this.closestPointToPointParameter(a,b);c=c||new THREE.Vector3;return this.delta(c).multiplyScalar(a).add(this.start)},applyMatrix4:function(a){this.start.applyMatrix4(a); +this.end.applyMatrix4(a);return this},equals:function(a){return a.start.equals(this.start)&&a.end.equals(this.end)},clone:function(){return(new THREE.Line3).copy(this)}};THREE.Box2=function(a,b){this.min=void 0!==a?a:new THREE.Vector2(Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector2(-Infinity,-Infinity)}; +THREE.Box2.prototype={constructor:THREE.Box2,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector2).set((a.x-this.min.x)/(this.max.x-this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.y 
+this.max.y?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector2).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector2;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},translate:function(a){this.min.add(a);this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&& +a.max.equals(this.max)},clone:function(){return(new THREE.Box2).copy(this)}};THREE.Box3=function(a,b){this.min=void 0!==a?a:new THREE.Vector3(Infinity,Infinity,Infinity);this.max=void 0!==b?b:new THREE.Vector3(-Infinity,-Infinity,-Infinity)}; +THREE.Box3.prototype={constructor:THREE.Box3,set:function(a,b){this.min.copy(a);this.max.copy(b);return this},setFromPoints:function(a){this.makeEmpty();for(var b=0,c=a.length;bthis.max.x||a.ythis.max.y||a.zthis.max.z?!1:!0},containsBox:function(a){return this.min.x<=a.min.x&&a.max.x<=this.max.x&&this.min.y<=a.min.y&&a.max.y<=this.max.y&&this.min.z<=a.min.z&&a.max.z<=this.max.z?!0:!1},getParameter:function(a,b){return(b||new THREE.Vector3).set((a.x-this.min.x)/(this.max.x- +this.min.x),(a.y-this.min.y)/(this.max.y-this.min.y),(a.z-this.min.z)/(this.max.z-this.min.z))},isIntersectionBox:function(a){return a.max.xthis.max.x||a.max.ythis.max.y||a.max.zthis.max.z?!1:!0},clampPoint:function(a,b){return(b||new THREE.Vector3).copy(a).clamp(this.min,this.max)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){return a.copy(b).clamp(this.min,this.max).sub(b).length()}}(),getBoundingSphere:function(){var a= +new THREE.Vector3;return function(b){b=b||new THREE.Sphere;b.center=this.center();b.radius=.5*this.size(a).length();return b}}(),intersect:function(a){this.min.max(a.min);this.max.min(a.max);return this},union:function(a){this.min.min(a.min);this.max.max(a.max);return this},applyMatrix4:function(){var a=[new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3,new THREE.Vector3];return function(b){a[0].set(this.min.x,this.min.y, +this.min.z).applyMatrix4(b);a[1].set(this.min.x,this.min.y,this.max.z).applyMatrix4(b);a[2].set(this.min.x,this.max.y,this.min.z).applyMatrix4(b);a[3].set(this.min.x,this.max.y,this.max.z).applyMatrix4(b);a[4].set(this.max.x,this.min.y,this.min.z).applyMatrix4(b);a[5].set(this.max.x,this.min.y,this.max.z).applyMatrix4(b);a[6].set(this.max.x,this.max.y,this.min.z).applyMatrix4(b);a[7].set(this.max.x,this.max.y,this.max.z).applyMatrix4(b);this.makeEmpty();this.setFromPoints(a);return this}}(),translate:function(a){this.min.add(a); +this.max.add(a);return this},equals:function(a){return a.min.equals(this.min)&&a.max.equals(this.max)},clone:function(){return(new THREE.Box3).copy(this)}};THREE.Matrix3=function(){this.elements=new Float32Array([1,0,0,0,1,0,0,0,1]);0this.determinant()&&(g=-g);c.x=f[12];c.y=f[13];c.z=f[14];b.elements.set(this.elements);c=1/g;var f=1/h,n=1/k;b.elements[0]*=c;b.elements[1]*= +c;b.elements[2]*=c;b.elements[4]*=f;b.elements[5]*=f;b.elements[6]*=f;b.elements[8]*=n;b.elements[9]*=n;b.elements[10]*=n;d.setFromRotationMatrix(b);e.x=g;e.y=h;e.z=k;return this}}(),makeFrustum:function(a,b,c,d,e,f){var g=this.elements;g[0]=2*e/(b-a);g[4]=0;g[8]=(b+a)/(b-a);g[12]=0;g[1]=0;g[5]=2*e/(d-c);g[9]=(d+c)/(d-c);g[13]=0;g[2]=0;g[6]=0;g[10]=-(f+e)/(f-e);g[14]=-2*f*e/(f-e);g[3]=0;g[7]=0;g[11]=-1;g[15]=0;return 
this},makePerspective:function(a,b,c,d){a=c*Math.tan(THREE.Math.degToRad(.5*a)); +var e=-a;return this.makeFrustum(e*b,a*b,e,a,c,d)},makeOrthographic:function(a,b,c,d,e,f){var g=this.elements,h=b-a,k=c-d,n=f-e;g[0]=2/h;g[4]=0;g[8]=0;g[12]=-((b+a)/h);g[1]=0;g[5]=2/k;g[9]=0;g[13]=-((c+d)/k);g[2]=0;g[6]=0;g[10]=-2/n;g[14]=-((f+e)/n);g[3]=0;g[7]=0;g[11]=0;g[15]=1;return this},fromArray:function(a){this.elements.set(a);return this},toArray:function(){var a=this.elements;return[a[0],a[1],a[2],a[3],a[4],a[5],a[6],a[7],a[8],a[9],a[10],a[11],a[12],a[13],a[14],a[15]]},clone:function(){return(new THREE.Matrix4).fromArray(this.elements)}}; +THREE.Ray=function(a,b){this.origin=void 0!==a?a:new THREE.Vector3;this.direction=void 0!==b?b:new THREE.Vector3}; +THREE.Ray.prototype={constructor:THREE.Ray,set:function(a,b){this.origin.copy(a);this.direction.copy(b);return this},copy:function(a){this.origin.copy(a.origin);this.direction.copy(a.direction);return this},at:function(a,b){return(b||new THREE.Vector3).copy(this.direction).multiplyScalar(a).add(this.origin)},recast:function(){var a=new THREE.Vector3;return function(b){this.origin.copy(this.at(b,a));return this}}(),closestPointToPoint:function(a,b){var c=b||new THREE.Vector3;c.subVectors(a,this.origin); +var d=c.dot(this.direction);return 0>d?c.copy(this.origin):c.copy(this.direction).multiplyScalar(d).add(this.origin)},distanceToPoint:function(){var a=new THREE.Vector3;return function(b){var c=a.subVectors(b,this.origin).dot(this.direction);if(0>c)return this.origin.distanceTo(b);a.copy(this.direction).multiplyScalar(c).add(this.origin);return a.distanceTo(b)}}(),distanceSqToSegment:function(a,b,c,d){var e=a.clone().add(b).multiplyScalar(.5),f=b.clone().sub(a).normalize(),g=.5*a.distanceTo(b),h= +this.origin.clone().sub(e);a=-this.direction.dot(f);b=h.dot(this.direction);var k=-h.dot(f),n=h.lengthSq(),p=Math.abs(1-a*a),q,m;0<=p?(h=a*k-b,q=a*b-k,m=g*p,0<=h?q>=-m?q<=m?(g=1/p,h*=g,q*=g,a=h*(h+a*q+2*b)+q*(a*h+q+2*k)+n):(q=g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):(q=-g,h=Math.max(0,-(a*q+b)),a=-h*h+q*(q+2*k)+n):q<=-m?(h=Math.max(0,-(-a*g+b)),q=0f)return null;f=Math.sqrt(f-e);e=d-f; +d+=f;return 0>e&&0>d?null:0>e?this.at(d,c):this.at(e,c)}}(),isIntersectionPlane:function(a){var b=a.distanceToPoint(this.origin);return 0===b||0>a.normal.dot(this.direction)*b?!0:!1},distanceToPlane:function(a){var b=a.normal.dot(this.direction);if(0==b)return 0==a.distanceToPoint(this.origin)?0:null;a=-(this.origin.dot(a.normal)+a.constant)/b;return 0<=a?a:null},intersectPlane:function(a,b){var c=this.distanceToPlane(a);return null===c?null:this.at(c,b)},isIntersectionBox:function(){var a=new THREE.Vector3; +return function(b){return null!==this.intersectBox(b,a)}}(),intersectBox:function(a,b){var c,d,e,f,g;d=1/this.direction.x;f=1/this.direction.y;g=1/this.direction.z;var h=this.origin;0<=d?(c=(a.min.x-h.x)*d,d*=a.max.x-h.x):(c=(a.max.x-h.x)*d,d*=a.min.x-h.x);0<=f?(e=(a.min.y-h.y)*f,f*=a.max.y-h.y):(e=(a.max.y-h.y)*f,f*=a.min.y-h.y);if(c>f||e>d)return null;if(e>c||c!==c)c=e;if(fg||e>d)return null;if(e>c||c!== +c)c=e;if(gd?null:this.at(0<=c?c:d,b)},intersectTriangle:function(){var a=new THREE.Vector3,b=new THREE.Vector3,c=new THREE.Vector3,d=new THREE.Vector3;return function(e,f,g,h,k){b.subVectors(f,e);c.subVectors(g,e);d.crossVectors(b,c);f=this.direction.dot(d);if(0f)h=-1,f=-f;else return null;a.subVectors(this.origin,e);e=h*this.direction.dot(c.crossVectors(a,c));if(0>e)return null;g=h*this.direction.dot(b.cross(a));if(0>g||e+g>f)return null; +e=-h*a.dot(d);return 
0>e?null:this.at(e/f,k)}}(),applyMatrix4:function(a){this.direction.add(this.origin).applyMatrix4(a);this.origin.applyMatrix4(a);this.direction.sub(this.origin);this.direction.normalize();return this},equals:function(a){return a.origin.equals(this.origin)&&a.direction.equals(this.direction)},clone:function(){return(new THREE.Ray).copy(this)}};THREE.Sphere=function(a,b){this.center=void 0!==a?a:new THREE.Vector3;this.radius=void 0!==b?b:0}; +THREE.Sphere.prototype={constructor:THREE.Sphere,set:function(a,b){this.center.copy(a);this.radius=b;return this},setFromPoints:function(){var a=new THREE.Box3;return function(b,c){var d=this.center;void 0!==c?d.copy(c):a.setFromPoints(b).center(d);for(var e=0,f=0,g=b.length;f=this.radius},containsPoint:function(a){return a.distanceToSquared(this.center)<= +this.radius*this.radius},distanceToPoint:function(a){return a.distanceTo(this.center)-this.radius},intersectsSphere:function(a){var b=this.radius+a.radius;return a.center.distanceToSquared(this.center)<=b*b},clampPoint:function(a,b){var c=this.center.distanceToSquared(a),d=b||new THREE.Vector3;d.copy(a);c>this.radius*this.radius&&(d.sub(this.center).normalize(),d.multiplyScalar(this.radius).add(this.center));return d},getBoundingBox:function(a){a=a||new THREE.Box3;a.set(this.center,this.center);a.expandByScalar(this.radius); +return a},applyMatrix4:function(a){this.center.applyMatrix4(a);this.radius*=a.getMaxScaleOnAxis();return this},translate:function(a){this.center.add(a);return this},equals:function(a){return a.center.equals(this.center)&&a.radius===this.radius},clone:function(){return(new THREE.Sphere).copy(this)}}; +THREE.Frustum=function(a,b,c,d,e,f){this.planes=[void 0!==a?a:new THREE.Plane,void 0!==b?b:new THREE.Plane,void 0!==c?c:new THREE.Plane,void 0!==d?d:new THREE.Plane,void 0!==e?e:new THREE.Plane,void 0!==f?f:new THREE.Plane]}; +THREE.Frustum.prototype={constructor:THREE.Frustum,set:function(a,b,c,d,e,f){var g=this.planes;g[0].copy(a);g[1].copy(b);g[2].copy(c);g[3].copy(d);g[4].copy(e);g[5].copy(f);return this},copy:function(a){for(var b=this.planes,c=0;6>c;c++)b[c].copy(a.planes[c]);return this},setFromMatrix:function(a){var b=this.planes,c=a.elements;a=c[0];var d=c[1],e=c[2],f=c[3],g=c[4],h=c[5],k=c[6],n=c[7],p=c[8],q=c[9],m=c[10],r=c[11],t=c[12],s=c[13],u=c[14],c=c[15];b[0].setComponents(f-a,n-g,r-p,c-t).normalize();b[1].setComponents(f+ +a,n+g,r+p,c+t).normalize();b[2].setComponents(f+d,n+h,r+q,c+s).normalize();b[3].setComponents(f-d,n-h,r-q,c-s).normalize();b[4].setComponents(f-e,n-k,r-m,c-u).normalize();b[5].setComponents(f+e,n+k,r+m,c+u).normalize();return this},intersectsObject:function(){var a=new THREE.Sphere;return function(b){var c=b.geometry;null===c.boundingSphere&&c.computeBoundingSphere();a.copy(c.boundingSphere);a.applyMatrix4(b.matrixWorld);return this.intersectsSphere(a)}}(),intersectsSphere:function(a){var b=this.planes, +c=a.center;a=-a.radius;for(var d=0;6>d;d++)if(b[d].distanceToPoint(c)e;e++){var f=d[e];a.x=0g&&0>f)return!1}return!0}}(), +containsPoint:function(a){for(var b=this.planes,c=0;6>c;c++)if(0>b[c].distanceToPoint(a))return!1;return!0},clone:function(){return(new THREE.Frustum).copy(this)}};THREE.Plane=function(a,b){this.normal=void 0!==a?a:new THREE.Vector3(1,0,0);this.constant=void 0!==b?b:0}; +THREE.Plane.prototype={constructor:THREE.Plane,set:function(a,b){this.normal.copy(a);this.constant=b;return this},setComponents:function(a,b,c,d){this.normal.set(a,b,c);this.constant=d;return 
this},setFromNormalAndCoplanarPoint:function(a,b){this.normal.copy(a);this.constant=-b.dot(this.normal);return this},setFromCoplanarPoints:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(c,d,e){d=a.subVectors(e,d).cross(b.subVectors(c,d)).normalize();this.setFromNormalAndCoplanarPoint(d, +c);return this}}(),copy:function(a){this.normal.copy(a.normal);this.constant=a.constant;return this},normalize:function(){var a=1/this.normal.length();this.normal.multiplyScalar(a);this.constant*=a;return this},negate:function(){this.constant*=-1;this.normal.negate();return this},distanceToPoint:function(a){return this.normal.dot(a)+this.constant},distanceToSphere:function(a){return this.distanceToPoint(a.center)-a.radius},projectPoint:function(a,b){return this.orthoPoint(a,b).sub(a).negate()},orthoPoint:function(a, +b){var c=this.distanceToPoint(a);return(b||new THREE.Vector3).copy(this.normal).multiplyScalar(c)},isIntersectionLine:function(a){var b=this.distanceToPoint(a.start);a=this.distanceToPoint(a.end);return 0>b&&0a&&0f||1e;e++)8==e||13==e||18==e||23==e?b[e]="-":14==e?b[e]="4":(2>=c&&(c=33554432+16777216*Math.random()|0),d=c&15,c>>=4,b[e]=a[19==e?d&3|8:d]);return b.join("")}}(),clamp:function(a,b,c){return ac?c:a},clampBottom:function(a,b){return a=c)return 1;a=(a-b)/(c-b);return a*a*(3-2*a)},smootherstep:function(a,b,c){if(a<=b)return 0;if(a>=c)return 1;a=(a-b)/(c-b);return a*a*a*(a*(6*a-15)+10)},random16:function(){return(65280*Math.random()+255*Math.random())/65535},randInt:function(a,b){return a+Math.floor(Math.random()*(b-a+1))},randFloat:function(a,b){return a+Math.random()*(b-a)},randFloatSpread:function(a){return a*(.5-Math.random())},degToRad:function(){var a=Math.PI/180;return function(b){return b*a}}(),radToDeg:function(){var a= +180/Math.PI;return function(b){return b*a}}(),isPowerOfTwo:function(a){return 0===(a&a-1)&&0!==a}}; +THREE.Spline=function(a){function b(a,b,c,d,e,f,g){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*g+(-3*(b-c)-2*a-d)*f+a*e+b}this.points=a;var c=[],d={x:0,y:0,z:0},e,f,g,h,k,n,p,q,m;this.initFromArray=function(a){this.points=[];for(var b=0;bthis.points.length-2?this.points.length-1:f+1;c[3]=f>this.points.length-3?this.points.length-1:f+ +2;n=this.points[c[0]];p=this.points[c[1]];q=this.points[c[2]];m=this.points[c[3]];h=g*g;k=g*h;d.x=b(n.x,p.x,q.x,m.x,g,h,k);d.y=b(n.y,p.y,q.y,m.y,g,h,k);d.z=b(n.z,p.z,q.z,m.z,g,h,k);return d};this.getControlPointsArray=function(){var a,b,c=this.points.length,d=[];for(a=0;a=b.x+b.y}}(); +THREE.Triangle.prototype={constructor:THREE.Triangle,set:function(a,b,c){this.a.copy(a);this.b.copy(b);this.c.copy(c);return this},setFromPointsAndIndices:function(a,b,c,d){this.a.copy(a[b]);this.b.copy(a[c]);this.c.copy(a[d]);return this},copy:function(a){this.a.copy(a.a);this.b.copy(a.b);this.c.copy(a.c);return this},area:function(){var a=new THREE.Vector3,b=new THREE.Vector3;return function(){a.subVectors(this.c,this.b);b.subVectors(this.a,this.b);return.5*a.cross(b).length()}}(),midpoint:function(a){return(a|| +new THREE.Vector3).addVectors(this.a,this.b).add(this.c).multiplyScalar(1/3)},normal:function(a){return THREE.Triangle.normal(this.a,this.b,this.c,a)},plane:function(a){return(a||new THREE.Plane).setFromCoplanarPoints(this.a,this.b,this.c)},barycoordFromPoint:function(a,b){return THREE.Triangle.barycoordFromPoint(a,this.a,this.b,this.c,b)},containsPoint:function(a){return THREE.Triangle.containsPoint(a,this.a,this.b,this.c)},equals:function(a){return a.a.equals(this.a)&&a.b.equals(this.b)&&a.c.equals(this.c)}, 
+clone:function(){return(new THREE.Triangle).copy(this)}};THREE.Clock=function(a){this.autoStart=void 0!==a?a:!0;this.elapsedTime=this.oldTime=this.startTime=0;this.running=!1}; +THREE.Clock.prototype={constructor:THREE.Clock,start:function(){this.oldTime=this.startTime=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now();this.running=!0},stop:function(){this.getElapsedTime();this.running=!1},getElapsedTime:function(){this.getDelta();return this.elapsedTime},getDelta:function(){var a=0;this.autoStart&&!this.running&&this.start();if(this.running){var b=void 0!==self.performance&&void 0!==self.performance.now?self.performance.now():Date.now(), +a=.001*(b-this.oldTime);this.oldTime=b;this.elapsedTime+=a}return a}};THREE.EventDispatcher=function(){}; +THREE.EventDispatcher.prototype={constructor:THREE.EventDispatcher,apply:function(a){a.addEventListener=THREE.EventDispatcher.prototype.addEventListener;a.hasEventListener=THREE.EventDispatcher.prototype.hasEventListener;a.removeEventListener=THREE.EventDispatcher.prototype.removeEventListener;a.dispatchEvent=THREE.EventDispatcher.prototype.dispatchEvent},addEventListener:function(a,b){void 0===this._listeners&&(this._listeners={});var c=this._listeners;void 0===c[a]&&(c[a]=[]);-1===c[a].indexOf(b)&& +c[a].push(b)},hasEventListener:function(a,b){if(void 0===this._listeners)return!1;var c=this._listeners;return void 0!==c[a]&&-1!==c[a].indexOf(b)?!0:!1},removeEventListener:function(a,b){if(void 0!==this._listeners){var c=this._listeners[a];if(void 0!==c){var d=c.indexOf(b);-1!==d&&c.splice(d,1)}}},dispatchEvent:function(a){if(void 0!==this._listeners){var b=this._listeners[a.type];if(void 0!==b){a.target=this;for(var c=[],d=b.length,e=0;eza?-1:1;h[4*a]=la.x;h[4*a+1]=la.y;h[4*a+2]=la.z;h[4*a+3]=Ga}if(void 0===this.attributes.index||void 0===this.attributes.position||void 0===this.attributes.normal||void 0===this.attributes.uv)console.warn("Missing required attributes (index, position, normal or uv) in BufferGeometry.computeTangents()");else{var c=this.attributes.index.array,d=this.attributes.position.array, +e=this.attributes.normal.array,f=this.attributes.uv.array,g=d.length/3;void 0===this.attributes.tangent&&this.addAttribute("tangent",new THREE.BufferAttribute(new Float32Array(4*g),4));for(var h=this.attributes.tangent.array,k=[],n=[],p=0;ps;s++)t=a[3*c+s],-1==m[t]?(q[2*s]=t,q[2*s+1]=-1,p++):m[t]k.index+b)for(k={start:f,count:0,index:g},h.push(k),p=0;6>p;p+=2)s=q[p+1],-1p;p+=2)t=q[p],s=q[p+1],-1===s&&(s=g++),m[t]=s,r[s]=t,e[f++]=s-k.index,k.count++}this.reorderBuffers(e,r,g);return this.offsets=h},merge:function(){console.log("BufferGeometry.merge(): TODO")},normalizeNormals:function(){for(var a=this.attributes.normal.array,b,c,d,e=0,f=a.length;ed?-1:1,e.vertexTangents[c]=new THREE.Vector4(w.x,w.y,w.z,d);this.hasTangents=!0},computeLineDistances:function(){for(var a=0,b=this.vertices,c=0,d=b.length;cd;d++)if(e[d]==e[(d+1)%3]){a.push(f);break}for(f=a.length-1;0<=f;f--)for(e=a[f],this.faces.splice(e,1),c=0,g=this.faceVertexUvs.length;ca.opacity)h.transparent=a.transparent;void 0!==a.depthTest&&(h.depthTest=a.depthTest);void 0!==a.depthWrite&&(h.depthWrite=a.depthWrite);void 0!==a.visible&&(h.visible=a.visible);void 0!==a.flipSided&&(h.side=THREE.BackSide);void 0!==a.doubleSided&&(h.side=THREE.DoubleSide);void 0!==a.wireframe&&(h.wireframe=a.wireframe);void 0!==a.vertexColors&&("face"=== 
+a.vertexColors?h.vertexColors=THREE.FaceColors:a.vertexColors&&(h.vertexColors=THREE.VertexColors));a.colorDiffuse?h.color=e(a.colorDiffuse):a.DbgColor&&(h.color=a.DbgColor);a.colorSpecular&&(h.specular=e(a.colorSpecular));a.colorAmbient&&(h.ambient=e(a.colorAmbient));a.colorEmissive&&(h.emissive=e(a.colorEmissive));a.transparency&&(h.opacity=a.transparency);a.specularCoef&&(h.shininess=a.specularCoef);a.mapDiffuse&&b&&d(h,"map",a.mapDiffuse,a.mapDiffuseRepeat,a.mapDiffuseOffset,a.mapDiffuseWrap, +a.mapDiffuseAnisotropy);a.mapLight&&b&&d(h,"lightMap",a.mapLight,a.mapLightRepeat,a.mapLightOffset,a.mapLightWrap,a.mapLightAnisotropy);a.mapBump&&b&&d(h,"bumpMap",a.mapBump,a.mapBumpRepeat,a.mapBumpOffset,a.mapBumpWrap,a.mapBumpAnisotropy);a.mapNormal&&b&&d(h,"normalMap",a.mapNormal,a.mapNormalRepeat,a.mapNormalOffset,a.mapNormalWrap,a.mapNormalAnisotropy);a.mapSpecular&&b&&d(h,"specularMap",a.mapSpecular,a.mapSpecularRepeat,a.mapSpecularOffset,a.mapSpecularWrap,a.mapSpecularAnisotropy);a.mapAlpha&& +b&&d(h,"alphaMap",a.mapAlpha,a.mapAlphaRepeat,a.mapAlphaOffset,a.mapAlphaWrap,a.mapAlphaAnisotropy);a.mapBumpScale&&(h.bumpScale=a.mapBumpScale);a.mapNormal?(g=THREE.ShaderLib.normalmap,k=THREE.UniformsUtils.clone(g.uniforms),k.tNormal.value=h.normalMap,a.mapNormalFactor&&k.uNormalScale.value.set(a.mapNormalFactor,a.mapNormalFactor),h.map&&(k.tDiffuse.value=h.map,k.enableDiffuse.value=!0),h.specularMap&&(k.tSpecular.value=h.specularMap,k.enableSpecular.value=!0),h.lightMap&&(k.tAO.value=h.lightMap, +k.enableAO.value=!0),k.diffuse.value.setHex(h.color),k.specular.value.setHex(h.specular),k.ambient.value.setHex(h.ambient),k.shininess.value=h.shininess,void 0!==h.opacity&&(k.opacity.value=h.opacity),g=new THREE.ShaderMaterial({fragmentShader:g.fragmentShader,vertexShader:g.vertexShader,uniforms:k,lights:!0,fog:!0}),h.transparent&&(g.transparent=!0)):g=new THREE[g](h);void 0!==a.DbgName&&(g.name=a.DbgName);return g}}; +THREE.Loader.Handlers={handlers:[],add:function(a,b){this.handlers.push(a,b)},get:function(a){for(var b=0,c=this.handlers.length;bg;g++)m=y[k++],v=u[2*m],m=u[2*m+1],v=new THREE.Vector2(v,m),2!==g&&c.faceVertexUvs[d][h].push(v),0!==g&&c.faceVertexUvs[d][h+1].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]),s.normal.copy(r.normal));if(t)for(d=0;4>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++], +G[q++],G[q]),2!==d&&r.vertexNormals.push(t),0!==d&&s.vertexNormals.push(t);p&&(p=y[k++],p=w[p],r.color.setHex(p),s.color.setHex(p));if(b)for(d=0;4>d;d++)p=y[k++],p=w[p],2!==d&&r.vertexColors.push(new THREE.Color(p)),0!==d&&s.vertexColors.push(new THREE.Color(p));c.faces.push(r);c.faces.push(s)}else{r=new THREE.Face3;r.a=y[k++];r.b=y[k++];r.c=y[k++];h&&(h=y[k++],r.materialIndex=h);h=c.faces.length;if(d)for(d=0;dg;g++)m=y[k++],v=u[2*m],m=u[2*m+1], +v=new THREE.Vector2(v,m),c.faceVertexUvs[d][h].push(v);q&&(q=3*y[k++],r.normal.set(G[q++],G[q++],G[q]));if(t)for(d=0;3>d;d++)q=3*y[k++],t=new THREE.Vector3(G[q++],G[q++],G[q]),r.vertexNormals.push(t);p&&(p=y[k++],r.color.setHex(w[p]));if(b)for(d=0;3>d;d++)p=y[k++],r.vertexColors.push(new THREE.Color(w[p]));c.faces.push(r)}})(d);(function(){var b=void 0!==a.influencesPerVertex?a.influencesPerVertex:2;if(a.skinWeights)for(var d=0,g=a.skinWeights.length;dthis.opacity&&(a.opacity=this.opacity);!1!==this.transparent&&(a.transparent=this.transparent);!1!==this.wireframe&&(a.wireframe=this.wireframe);return a},clone:function(a){void 0===a&&(a=new 
THREE.Material);a.name=this.name;a.side=this.side;a.opacity=this.opacity;a.transparent=this.transparent;a.blending=this.blending;a.blendSrc=this.blendSrc;a.blendDst=this.blendDst;a.blendEquation=this.blendEquation;a.depthTest=this.depthTest;a.depthWrite=this.depthWrite;a.polygonOffset=this.polygonOffset;a.polygonOffsetFactor= +this.polygonOffsetFactor;a.polygonOffsetUnits=this.polygonOffsetUnits;a.alphaTest=this.alphaTest;a.overdraw=this.overdraw;a.visible=this.visible;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.Material.prototype);THREE.MaterialIdCount=0; +THREE.LineBasicMaterial=function(a){THREE.Material.call(this);this.type="LineBasicMaterial";this.color=new THREE.Color(16777215);this.linewidth=1;this.linejoin=this.linecap="round";this.vertexColors=THREE.NoColors;this.fog=!0;this.setValues(a)};THREE.LineBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineBasicMaterial.prototype.clone=function(){var a=new THREE.LineBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.linecap=this.linecap;a.linejoin=this.linejoin;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.LineDashedMaterial=function(a){THREE.Material.call(this);this.type="LineDashedMaterial";this.color=new THREE.Color(16777215);this.scale=this.linewidth=1;this.dashSize=3;this.gapSize=1;this.vertexColors=!1;this.fog=!0;this.setValues(a)};THREE.LineDashedMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.LineDashedMaterial.prototype.clone=function(){var a=new THREE.LineDashedMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.linewidth=this.linewidth;a.scale=this.scale;a.dashSize=this.dashSize;a.gapSize=this.gapSize;a.vertexColors=this.vertexColors;a.fog=this.fog;return a}; +THREE.MeshBasicMaterial=function(a){THREE.Material.call(this);this.type="MeshBasicMaterial";this.color=new THREE.Color(16777215);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphTargets=this.skinning=!1;this.setValues(a)}; +THREE.MeshBasicMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshBasicMaterial.prototype.clone=function(){var a=new THREE.MeshBasicMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap; +a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;return a}; +THREE.MeshLambertMaterial=function(a){THREE.Material.call(this);this.type="MeshLambertMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.wrapAround=!1;this.wrapRGB=new 
THREE.Vector3(1,1,1);this.envMap=this.alphaMap=this.specularMap=this.lightMap=this.map=null;this.combine=THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth= +1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshLambertMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshLambertMaterial.prototype.clone=function(){var a=new THREE.MeshLambertMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog; +a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshPhongMaterial=function(a){THREE.Material.call(this);this.type="MeshPhongMaterial";this.color=new THREE.Color(16777215);this.ambient=new THREE.Color(16777215);this.emissive=new THREE.Color(0);this.specular=new THREE.Color(1118481);this.shininess=30;this.wrapAround=this.metal=!1;this.wrapRGB=new THREE.Vector3(1,1,1);this.bumpMap=this.lightMap=this.map=null;this.bumpScale=1;this.normalMap=null;this.normalScale=new THREE.Vector2(1,1);this.envMap=this.alphaMap=this.specularMap=null;this.combine= +THREE.MultiplyOperation;this.reflectivity=1;this.refractionRatio=.98;this.fog=!0;this.shading=THREE.SmoothShading;this.wireframe=!1;this.wireframeLinewidth=1;this.wireframeLinejoin=this.wireframeLinecap="round";this.vertexColors=THREE.NoColors;this.morphNormals=this.morphTargets=this.skinning=!1;this.setValues(a)};THREE.MeshPhongMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshPhongMaterial.prototype.clone=function(){var a=new THREE.MeshPhongMaterial;THREE.Material.prototype.clone.call(this,a);a.color.copy(this.color);a.ambient.copy(this.ambient);a.emissive.copy(this.emissive);a.specular.copy(this.specular);a.shininess=this.shininess;a.metal=this.metal;a.wrapAround=this.wrapAround;a.wrapRGB.copy(this.wrapRGB);a.map=this.map;a.lightMap=this.lightMap;a.bumpMap=this.bumpMap;a.bumpScale=this.bumpScale;a.normalMap=this.normalMap;a.normalScale.copy(this.normalScale); +a.specularMap=this.specularMap;a.alphaMap=this.alphaMap;a.envMap=this.envMap;a.combine=this.combine;a.reflectivity=this.reflectivity;a.refractionRatio=this.refractionRatio;a.fog=this.fog;a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;a.wireframeLinecap=this.wireframeLinecap;a.wireframeLinejoin=this.wireframeLinejoin;a.vertexColors=this.vertexColors;a.skinning=this.skinning;a.morphTargets=this.morphTargets;a.morphNormals=this.morphNormals;return a}; +THREE.MeshDepthMaterial=function(a){THREE.Material.call(this);this.type="MeshDepthMaterial";this.wireframe=this.morphTargets=!1;this.wireframeLinewidth=1;this.setValues(a)};THREE.MeshDepthMaterial.prototype=Object.create(THREE.Material.prototype);THREE.MeshDepthMaterial.prototype.clone=function(){var a=new 
THREE.MeshDepthMaterial;THREE.Material.prototype.clone.call(this,a);a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a}; +THREE.MeshNormalMaterial=function(a){THREE.Material.call(this,a);this.type="MeshNormalMaterial";this.shading=THREE.FlatShading;this.wireframe=!1;this.wireframeLinewidth=1;this.morphTargets=!1;this.setValues(a)};THREE.MeshNormalMaterial.prototype=Object.create(THREE.Material.prototype); +THREE.MeshNormalMaterial.prototype.clone=function(){var a=new THREE.MeshNormalMaterial;THREE.Material.prototype.clone.call(this,a);a.shading=this.shading;a.wireframe=this.wireframe;a.wireframeLinewidth=this.wireframeLinewidth;return a};THREE.MeshFaceMaterial=function(a){this.uuid=THREE.Math.generateUUID();this.type="MeshFaceMaterial";this.materials=a instanceof Array?a:[]}; +THREE.MeshFaceMaterial.prototype={constructor:THREE.MeshFaceMaterial,toJSON:function(){for(var a={metadata:{version:4.2,type:"material",generator:"MaterialExporter"},uuid:this.uuid,type:this.type,materials:[]},b=0,c=this.materials.length;bf)){var m=b.origin.distanceTo(n);md.far||e.push({distance:m,point:k.clone().applyMatrix4(this.matrixWorld),face:null,faceIndex:null,object:this})}}}();THREE.Line.prototype.clone=function(a){void 0===a&&(a=new THREE.Line(this.geometry,this.material,this.mode));THREE.Object3D.prototype.clone.call(this,a);return a}; +THREE.Mesh=function(a,b){THREE.Object3D.call(this);this.type="Mesh";this.geometry=void 0!==a?a:new THREE.Geometry;this.material=void 0!==b?b:new THREE.MeshBasicMaterial({color:16777215*Math.random()});this.updateMorphTargets()};THREE.Mesh.prototype=Object.create(THREE.Object3D.prototype); +THREE.Mesh.prototype.updateMorphTargets=function(){if(void 0!==this.geometry.morphTargets&&0g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this})}}}else for(s=p.position.array,t=k=0,w=s.length;k +g.far||h.push({distance:x,point:K,face:new THREE.Face3(p,q,m,THREE.Triangle.normal(d,e,f)),faceIndex:null,object:this}))}}else if(k instanceof THREE.Geometry)for(t=this.material instanceof THREE.MeshFaceMaterial,s=!0===t?this.material.materials:null,r=g.precision,u=k.vertices,v=0,y=k.faces.length;vg.far||h.push({distance:x,point:K,face:G,faceIndex:v,object:this}))}}}();THREE.Mesh.prototype.clone=function(a,b){void 0===a&&(a=new THREE.Mesh(this.geometry,this.material));THREE.Object3D.prototype.clone.call(this,a,b);return a};THREE.Bone=function(a){THREE.Object3D.call(this);this.skin=a};THREE.Bone.prototype=Object.create(THREE.Object3D.prototype); +THREE.Skeleton=function(a,b,c){this.useVertexTexture=void 0!==c?c:!0;this.identityMatrix=new THREE.Matrix4;a=a||[];this.bones=a.slice(0);this.useVertexTexture?(this.boneTextureHeight=this.boneTextureWidth=a=256h.end&&(h.end=e);b||(b=g)}}a.firstAnimation=b}; +THREE.MorphAnimMesh.prototype.setAnimationLabel=function(a,b,c){this.geometry.animations||(this.geometry.animations={});this.geometry.animations[a]={start:b,end:c}};THREE.MorphAnimMesh.prototype.playAnimation=function(a,b){var c=this.geometry.animations[a];c?(this.setFrameRange(c.start,c.end),this.duration=(c.end-c.start)/b*1E3,this.time=0):console.warn("animation["+a+"] undefined")}; +THREE.MorphAnimMesh.prototype.updateAnimation=function(a){var 
b=this.duration/this.length;this.time+=this.direction*a;if(this.mirroredLoop){if(this.time>this.duration||0>this.time)this.direction*=-1,this.time>this.duration&&(this.time=this.duration,this.directionBackwards=!0),0>this.time&&(this.time=0,this.directionBackwards=!1)}else this.time%=this.duration,0>this.time&&(this.time+=this.duration);a=this.startKeyframe+THREE.Math.clamp(Math.floor(this.time/b),0,this.length-1);a!==this.currentKeyframe&& +(this.morphTargetInfluences[this.lastKeyframe]=0,this.morphTargetInfluences[this.currentKeyframe]=1,this.morphTargetInfluences[a]=0,this.lastKeyframe=this.currentKeyframe,this.currentKeyframe=a);b=this.time%b/b;this.directionBackwards&&(b=1-b);this.morphTargetInfluences[this.currentKeyframe]=b;this.morphTargetInfluences[this.lastKeyframe]=1-b}; +THREE.MorphAnimMesh.prototype.interpolateTargets=function(a,b,c){for(var d=this.morphTargetInfluences,e=0,f=d.length;e=this.objects[d].distance)this.objects[d-1].object.visible=!1,this.objects[d].object.visible=!0;else break;for(;dthis.scale.x||c.push({distance:d,point:this.position,face:null,object:this})}}();THREE.Sprite.prototype.clone=function(a){void 0===a&&(a=new THREE.Sprite(this.material));THREE.Object3D.prototype.clone.call(this,a);return a};THREE.Particle=THREE.Sprite; +THREE.LensFlare=function(a,b,c,d,e){THREE.Object3D.call(this);this.lensFlares=[];this.positionScreen=new THREE.Vector3;this.customUpdateCallback=void 0;void 0!==a&&this.add(a,b,c,d,e)};THREE.LensFlare.prototype=Object.create(THREE.Object3D.prototype); +THREE.LensFlare.prototype.add=function(a,b,c,d,e,f){void 0===b&&(b=-1);void 0===c&&(c=0);void 0===f&&(f=1);void 0===e&&(e=new THREE.Color(16777215));void 0===d&&(d=THREE.NormalBlending);c=Math.min(c,Math.max(0,c));this.lensFlares.push({texture:a,size:b,distance:c,x:0,y:0,z:0,scale:1,rotation:1,opacity:f,color:e,blending:d})}; +THREE.LensFlare.prototype.updateLensFlares=function(){var a,b=this.lensFlares.length,c,d=2*-this.positionScreen.x,e=2*-this.positionScreen.y;for(a=0;a dashSize ) {\n\t\tdiscard;\n\t}\n\tgl_FragColor = vec4( diffuse, opacity );",THREE.ShaderChunk.logdepthbuf_fragment,THREE.ShaderChunk.color_fragment,THREE.ShaderChunk.fog_fragment, +"}"].join("\n")},depth:{uniforms:{mNear:{type:"f",value:1},mFar:{type:"f",value:2E3},opacity:{type:"f",value:1}},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float mNear;\nuniform float mFar;\nuniform float opacity;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment, +"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tfloat depth = gl_FragDepthEXT / gl_FragCoord.w;\n\t#else\n\t\tfloat depth = gl_FragCoord.z / gl_FragCoord.w;\n\t#endif\n\tfloat color = 1.0 - smoothstep( mNear, mFar, depth );\n\tgl_FragColor = vec4( vec3( color ), opacity );\n}"].join("\n")},normal:{uniforms:{opacity:{type:"f",value:1}},vertexShader:["varying vec3 vNormal;",THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvNormal = normalize( normalMatrix * normal );", +THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform float opacity;\nvarying vec3 vNormal;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = vec4( 0.5 * normalize( vNormal ) + 0.5, opacity 
);",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},normalmap:{uniforms:THREE.UniformsUtils.merge([THREE.UniformsLib.fog,THREE.UniformsLib.lights,THREE.UniformsLib.shadowmap,{enableAO:{type:"i", +value:0},enableDiffuse:{type:"i",value:0},enableSpecular:{type:"i",value:0},enableReflection:{type:"i",value:0},enableDisplacement:{type:"i",value:0},tDisplacement:{type:"t",value:null},tDiffuse:{type:"t",value:null},tCube:{type:"t",value:null},tNormal:{type:"t",value:null},tSpecular:{type:"t",value:null},tAO:{type:"t",value:null},uNormalScale:{type:"v2",value:new THREE.Vector2(1,1)},uDisplacementBias:{type:"f",value:0},uDisplacementScale:{type:"f",value:1},diffuse:{type:"c",value:new THREE.Color(16777215)}, +specular:{type:"c",value:new THREE.Color(1118481)},ambient:{type:"c",value:new THREE.Color(16777215)},shininess:{type:"f",value:30},opacity:{type:"f",value:1},useRefract:{type:"i",value:0},refractionRatio:{type:"f",value:.98},reflectivity:{type:"f",value:.5},uOffset:{type:"v2",value:new THREE.Vector2(0,0)},uRepeat:{type:"v2",value:new THREE.Vector2(1,1)},wrapRGB:{type:"v3",value:new THREE.Vector3(1,1,1)}}]),fragmentShader:["uniform vec3 ambient;\nuniform vec3 diffuse;\nuniform vec3 specular;\nuniform float shininess;\nuniform float opacity;\nuniform bool enableDiffuse;\nuniform bool enableSpecular;\nuniform bool enableAO;\nuniform bool enableReflection;\nuniform sampler2D tDiffuse;\nuniform sampler2D tNormal;\nuniform sampler2D tSpecular;\nuniform sampler2D tAO;\nuniform samplerCube tCube;\nuniform vec2 uNormalScale;\nuniform bool useRefract;\nuniform float refractionRatio;\nuniform float reflectivity;\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nuniform vec3 ambientLightColor;\n#if MAX_DIR_LIGHTS > 0\n\tuniform vec3 directionalLightColor[ MAX_DIR_LIGHTS ];\n\tuniform vec3 directionalLightDirection[ MAX_DIR_LIGHTS ];\n#endif\n#if MAX_HEMI_LIGHTS > 0\n\tuniform vec3 hemisphereLightSkyColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightGroundColor[ MAX_HEMI_LIGHTS ];\n\tuniform vec3 hemisphereLightDirection[ MAX_HEMI_LIGHTS ];\n#endif\n#if MAX_POINT_LIGHTS > 0\n\tuniform vec3 pointLightColor[ MAX_POINT_LIGHTS ];\n\tuniform vec3 pointLightPosition[ MAX_POINT_LIGHTS ];\n\tuniform float pointLightDistance[ MAX_POINT_LIGHTS ];\n#endif\n#if MAX_SPOT_LIGHTS > 0\n\tuniform vec3 spotLightColor[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightPosition[ MAX_SPOT_LIGHTS ];\n\tuniform vec3 spotLightDirection[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightAngleCos[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightExponent[ MAX_SPOT_LIGHTS ];\n\tuniform float spotLightDistance[ MAX_SPOT_LIGHTS ];\n#endif\n#ifdef WRAP_AROUND\n\tuniform vec3 wrapRGB;\n#endif\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.shadowmap_pars_fragment,THREE.ShaderChunk.fog_pars_fragment,THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {",THREE.ShaderChunk.logdepthbuf_fragment,"\tgl_FragColor = vec4( vec3( 1.0 ), opacity );\n\tvec3 specularTex = vec3( 1.0 );\n\tvec3 normalTex = texture2D( tNormal, vUv ).xyz * 2.0 - 1.0;\n\tnormalTex.xy *= uNormalScale;\n\tnormalTex = normalize( normalTex );\n\tif( enableDiffuse ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 texelColor = texture2D( tDiffuse, vUv );\n\t\t\ttexelColor.xyz *= texelColor.xyz;\n\t\t\tgl_FragColor = gl_FragColor * texelColor;\n\t\t#else\n\t\t\tgl_FragColor = gl_FragColor * texture2D( tDiffuse, vUv );\n\t\t#endif\n\t}\n\tif( enableAO ) {\n\t\t#ifdef GAMMA_INPUT\n\t\t\tvec4 aoColor = 
texture2D( tAO, vUv );\n\t\t\taoColor.xyz *= aoColor.xyz;\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * aoColor.xyz;\n\t\t#else\n\t\t\tgl_FragColor.xyz = gl_FragColor.xyz * texture2D( tAO, vUv ).xyz;\n\t\t#endif\n\t}", +THREE.ShaderChunk.alphatest_fragment,"\tif( enableSpecular )\n\t\tspecularTex = texture2D( tSpecular, vUv ).xyz;\n\tmat3 tsb = mat3( normalize( vTangent ), normalize( vBinormal ), normalize( vNormal ) );\n\tvec3 finalNormal = tsb * normalTex;\n\t#ifdef FLIP_SIDED\n\t\tfinalNormal = -finalNormal;\n\t#endif\n\tvec3 normal = normalize( finalNormal );\n\tvec3 viewPosition = normalize( vViewPosition );\n\t#if MAX_POINT_LIGHTS > 0\n\t\tvec3 pointDiffuse = vec3( 0.0 );\n\t\tvec3 pointSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_POINT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( pointLightPosition[ i ], 1.0 );\n\t\t\tvec3 pointVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat pointDistance = 1.0;\n\t\t\tif ( pointLightDistance[ i ] > 0.0 )\n\t\t\t\tpointDistance = 1.0 - min( ( length( pointVector ) / pointLightDistance[ i ] ), 1.0 );\n\t\t\tpointVector = normalize( pointVector );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat pointDiffuseWeightFull = max( dot( normal, pointVector ), 0.0 );\n\t\t\t\tfloat pointDiffuseWeightHalf = max( 0.5 * dot( normal, pointVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 pointDiffuseWeight = mix( vec3( pointDiffuseWeightFull ), vec3( pointDiffuseWeightHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat pointDiffuseWeight = max( dot( normal, pointVector ), 0.0 );\n\t\t\t#endif\n\t\t\tpointDiffuse += pointDistance * pointLightColor[ i ] * diffuse * pointDiffuseWeight;\n\t\t\tvec3 pointHalfVector = normalize( pointVector + viewPosition );\n\t\t\tfloat pointDotNormalHalf = max( dot( normal, pointHalfVector ), 0.0 );\n\t\t\tfloat pointSpecularWeight = specularTex.r * max( pow( pointDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( pointVector, pointHalfVector ), 0.0 ), 5.0 );\n\t\t\tpointSpecular += schlick * pointLightColor[ i ] * pointSpecularWeight * pointDiffuseWeight * pointDistance * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\tvec3 spotDiffuse = vec3( 0.0 );\n\t\tvec3 spotSpecular = vec3( 0.0 );\n\t\tfor ( int i = 0; i < MAX_SPOT_LIGHTS; i ++ ) {\n\t\t\tvec4 lPosition = viewMatrix * vec4( spotLightPosition[ i ], 1.0 );\n\t\t\tvec3 spotVector = lPosition.xyz + vViewPosition.xyz;\n\t\t\tfloat spotDistance = 1.0;\n\t\t\tif ( spotLightDistance[ i ] > 0.0 )\n\t\t\t\tspotDistance = 1.0 - min( ( length( spotVector ) / spotLightDistance[ i ] ), 1.0 );\n\t\t\tspotVector = normalize( spotVector );\n\t\t\tfloat spotEffect = dot( spotLightDirection[ i ], normalize( spotLightPosition[ i ] - vWorldPosition ) );\n\t\t\tif ( spotEffect > spotLightAngleCos[ i ] ) {\n\t\t\t\tspotEffect = max( pow( max( spotEffect, 0.0 ), spotLightExponent[ i ] ), 0.0 );\n\t\t\t\t#ifdef WRAP_AROUND\n\t\t\t\t\tfloat spotDiffuseWeightFull = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t\tfloat spotDiffuseWeightHalf = max( 0.5 * dot( normal, spotVector ) + 0.5, 0.0 );\n\t\t\t\t\tvec3 spotDiffuseWeight = mix( vec3( spotDiffuseWeightFull ), vec3( spotDiffuseWeightHalf ), wrapRGB );\n\t\t\t\t#else\n\t\t\t\t\tfloat spotDiffuseWeight = max( dot( normal, spotVector ), 0.0 );\n\t\t\t\t#endif\n\t\t\t\tspotDiffuse += spotDistance * spotLightColor[ i ] * diffuse * spotDiffuseWeight * spotEffect;\n\t\t\t\tvec3 spotHalfVector = 
normalize( spotVector + viewPosition );\n\t\t\t\tfloat spotDotNormalHalf = max( dot( normal, spotHalfVector ), 0.0 );\n\t\t\t\tfloat spotSpecularWeight = specularTex.r * max( pow( spotDotNormalHalf, shininess ), 0.0 );\n\t\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( spotVector, spotHalfVector ), 0.0 ), 5.0 );\n\t\t\t\tspotSpecular += schlick * spotLightColor[ i ] * spotSpecularWeight * spotDiffuseWeight * spotDistance * specularNormalization * spotEffect;\n\t\t\t}\n\t\t}\n\t#endif\n\t#if MAX_DIR_LIGHTS > 0\n\t\tvec3 dirDiffuse = vec3( 0.0 );\n\t\tvec3 dirSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_DIR_LIGHTS; i++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( directionalLightDirection[ i ], 0.0 );\n\t\t\tvec3 dirVector = normalize( lDirection.xyz );\n\t\t\t#ifdef WRAP_AROUND\n\t\t\t\tfloat directionalLightWeightingFull = max( dot( normal, dirVector ), 0.0 );\n\t\t\t\tfloat directionalLightWeightingHalf = max( 0.5 * dot( normal, dirVector ) + 0.5, 0.0 );\n\t\t\t\tvec3 dirDiffuseWeight = mix( vec3( directionalLightWeightingFull ), vec3( directionalLightWeightingHalf ), wrapRGB );\n\t\t\t#else\n\t\t\t\tfloat dirDiffuseWeight = max( dot( normal, dirVector ), 0.0 );\n\t\t\t#endif\n\t\t\tdirDiffuse += directionalLightColor[ i ] * diffuse * dirDiffuseWeight;\n\t\t\tvec3 dirHalfVector = normalize( dirVector + viewPosition );\n\t\t\tfloat dirDotNormalHalf = max( dot( normal, dirHalfVector ), 0.0 );\n\t\t\tfloat dirSpecularWeight = specularTex.r * max( pow( dirDotNormalHalf, shininess ), 0.0 );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlick = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( dirVector, dirHalfVector ), 0.0 ), 5.0 );\n\t\t\tdirSpecular += schlick * directionalLightColor[ i ] * dirSpecularWeight * dirDiffuseWeight * specularNormalization;\n\t\t}\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\tvec3 hemiDiffuse = vec3( 0.0 );\n\t\tvec3 hemiSpecular = vec3( 0.0 );\n\t\tfor( int i = 0; i < MAX_HEMI_LIGHTS; i ++ ) {\n\t\t\tvec4 lDirection = viewMatrix * vec4( hemisphereLightDirection[ i ], 0.0 );\n\t\t\tvec3 lVector = normalize( lDirection.xyz );\n\t\t\tfloat dotProduct = dot( normal, lVector );\n\t\t\tfloat hemiDiffuseWeight = 0.5 * dotProduct + 0.5;\n\t\t\tvec3 hemiColor = mix( hemisphereLightGroundColor[ i ], hemisphereLightSkyColor[ i ], hemiDiffuseWeight );\n\t\t\themiDiffuse += diffuse * hemiColor;\n\t\t\tvec3 hemiHalfVectorSky = normalize( lVector + viewPosition );\n\t\t\tfloat hemiDotNormalHalfSky = 0.5 * dot( normal, hemiHalfVectorSky ) + 0.5;\n\t\t\tfloat hemiSpecularWeightSky = specularTex.r * max( pow( max( hemiDotNormalHalfSky, 0.0 ), shininess ), 0.0 );\n\t\t\tvec3 lVectorGround = -lVector;\n\t\t\tvec3 hemiHalfVectorGround = normalize( lVectorGround + viewPosition );\n\t\t\tfloat hemiDotNormalHalfGround = 0.5 * dot( normal, hemiHalfVectorGround ) + 0.5;\n\t\t\tfloat hemiSpecularWeightGround = specularTex.r * max( pow( max( hemiDotNormalHalfGround, 0.0 ), shininess ), 0.0 );\n\t\t\tfloat dotProductGround = dot( normal, lVectorGround );\n\t\t\tfloat specularNormalization = ( shininess + 2.0 ) / 8.0;\n\t\t\tvec3 schlickSky = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVector, hemiHalfVectorSky ), 0.0 ), 5.0 );\n\t\t\tvec3 schlickGround = specular + vec3( 1.0 - specular ) * pow( max( 1.0 - dot( lVectorGround, hemiHalfVectorGround ), 0.0 ), 5.0 );\n\t\t\themiSpecular += hemiColor * specularNormalization * ( schlickSky * 
hemiSpecularWeightSky * max( dotProduct, 0.0 ) + schlickGround * hemiSpecularWeightGround * max( dotProductGround, 0.0 ) );\n\t\t}\n\t#endif\n\tvec3 totalDiffuse = vec3( 0.0 );\n\tvec3 totalSpecular = vec3( 0.0 );\n\t#if MAX_DIR_LIGHTS > 0\n\t\ttotalDiffuse += dirDiffuse;\n\t\ttotalSpecular += dirSpecular;\n\t#endif\n\t#if MAX_HEMI_LIGHTS > 0\n\t\ttotalDiffuse += hemiDiffuse;\n\t\ttotalSpecular += hemiSpecular;\n\t#endif\n\t#if MAX_POINT_LIGHTS > 0\n\t\ttotalDiffuse += pointDiffuse;\n\t\ttotalSpecular += pointSpecular;\n\t#endif\n\t#if MAX_SPOT_LIGHTS > 0\n\t\ttotalDiffuse += spotDiffuse;\n\t\ttotalSpecular += spotSpecular;\n\t#endif\n\t#ifdef METAL\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient + totalSpecular );\n\t#else\n\t\tgl_FragColor.xyz = gl_FragColor.xyz * ( totalDiffuse + ambientLightColor * ambient ) + totalSpecular;\n\t#endif\n\tif ( enableReflection ) {\n\t\tvec3 vReflect;\n\t\tvec3 cameraToVertex = normalize( vWorldPosition - cameraPosition );\n\t\tif ( useRefract ) {\n\t\t\tvReflect = refract( cameraToVertex, normal, refractionRatio );\n\t\t} else {\n\t\t\tvReflect = reflect( cameraToVertex, normal );\n\t\t}\n\t\tvec4 cubeColor = textureCube( tCube, vec3( -vReflect.x, vReflect.yz ) );\n\t\t#ifdef GAMMA_INPUT\n\t\t\tcubeColor.xyz *= cubeColor.xyz;\n\t\t#endif\n\t\tgl_FragColor.xyz = mix( gl_FragColor.xyz, cubeColor.xyz, specularTex.r * reflectivity );\n\t}", +THREE.ShaderChunk.shadowmap_fragment,THREE.ShaderChunk.linear_to_gamma_fragment,THREE.ShaderChunk.fog_fragment,"}"].join("\n"),vertexShader:["attribute vec4 tangent;\nuniform vec2 uOffset;\nuniform vec2 uRepeat;\nuniform bool enableDisplacement;\n#ifdef VERTEX_TEXTURES\n\tuniform sampler2D tDisplacement;\n\tuniform float uDisplacementScale;\n\tuniform float uDisplacementBias;\n#endif\nvarying vec3 vTangent;\nvarying vec3 vBinormal;\nvarying vec3 vNormal;\nvarying vec2 vUv;\nvarying vec3 vWorldPosition;\nvarying vec3 vViewPosition;", +THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.shadowmap_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.skinnormal_vertex,"\t#ifdef USE_SKINNING\n\t\tvNormal = normalize( normalMatrix * skinnedNormal.xyz );\n\t\tvec4 skinnedTangent = skinMatrix * vec4( tangent.xyz, 0.0 );\n\t\tvTangent = normalize( normalMatrix * skinnedTangent.xyz );\n\t#else\n\t\tvNormal = normalize( normalMatrix * normal );\n\t\tvTangent = normalize( normalMatrix * tangent.xyz );\n\t#endif\n\tvBinormal = normalize( cross( vNormal, vTangent ) * tangent.w );\n\tvUv = uv * uRepeat + uOffset;\n\tvec3 displacedPosition;\n\t#ifdef VERTEX_TEXTURES\n\t\tif ( enableDisplacement ) {\n\t\t\tvec3 dv = texture2D( tDisplacement, uv ).xyz;\n\t\t\tfloat df = uDisplacementScale * dv.x + uDisplacementBias;\n\t\t\tdisplacedPosition = position + normalize( normal ) * df;\n\t\t} else {\n\t\t\t#ifdef USE_SKINNING\n\t\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\t\tvec4 skinned = vec4( 0.0 );\n\t\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\t\tdisplacedPosition = skinned.xyz;\n\t\t\t#else\n\t\t\t\tdisplacedPosition = position;\n\t\t\t#endif\n\t\t}\n\t#else\n\t\t#ifdef USE_SKINNING\n\t\t\tvec4 skinVertex = bindMatrix * vec4( position, 1.0 );\n\t\t\tvec4 skinned = vec4( 
0.0 );\n\t\t\tskinned += boneMatX * skinVertex * skinWeight.x;\n\t\t\tskinned += boneMatY * skinVertex * skinWeight.y;\n\t\t\tskinned += boneMatZ * skinVertex * skinWeight.z;\n\t\t\tskinned += boneMatW * skinVertex * skinWeight.w;\n\t\t\tskinned = bindMatrixInverse * skinned;\n\t\t\tdisplacedPosition = skinned.xyz;\n\t\t#else\n\t\t\tdisplacedPosition = position;\n\t\t#endif\n\t#endif\n\tvec4 mvPosition = modelViewMatrix * vec4( displacedPosition, 1.0 );\n\tvec4 worldPosition = modelMatrix * vec4( displacedPosition, 1.0 );\n\tgl_Position = projectionMatrix * mvPosition;", +THREE.ShaderChunk.logdepthbuf_vertex,"\tvWorldPosition = worldPosition.xyz;\n\tvViewPosition = -mvPosition.xyz;\n\t#ifdef USE_SHADOWMAP\n\t\tfor( int i = 0; i < MAX_SHADOWS; i ++ ) {\n\t\t\tvShadowCoord[ i ] = shadowMatrix[ i ] * worldPosition;\n\t\t}\n\t#endif\n}"].join("\n")},cube:{uniforms:{tCube:{type:"t",value:null},tFlip:{type:"f",value:-1}},vertexShader:["varying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_vertex,"void main() {\n\tvec4 worldPosition = modelMatrix * vec4( position, 1.0 );\n\tvWorldPosition = worldPosition.xyz;\n\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", +THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:["uniform samplerCube tCube;\nuniform float tFlip;\nvarying vec3 vWorldPosition;",THREE.ShaderChunk.logdepthbuf_pars_fragment,"void main() {\n\tgl_FragColor = textureCube( tCube, vec3( tFlip * vWorldPosition.x, vWorldPosition.yz ) );",THREE.ShaderChunk.logdepthbuf_fragment,"}"].join("\n")},depthRGBA:{uniforms:{},vertexShader:[THREE.ShaderChunk.morphtarget_pars_vertex,THREE.ShaderChunk.skinning_pars_vertex,THREE.ShaderChunk.logdepthbuf_pars_vertex, +"void main() {",THREE.ShaderChunk.skinbase_vertex,THREE.ShaderChunk.morphtarget_vertex,THREE.ShaderChunk.skinning_vertex,THREE.ShaderChunk.default_vertex,THREE.ShaderChunk.logdepthbuf_vertex,"}"].join("\n"),fragmentShader:[THREE.ShaderChunk.logdepthbuf_pars_fragment,"vec4 pack_depth( const in float depth ) {\n\tconst vec4 bit_shift = vec4( 256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0 );\n\tconst vec4 bit_mask = vec4( 0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0 );\n\tvec4 res = mod( depth * bit_shift * vec4( 255 ), vec4( 256 ) ) / vec4( 255 );\n\tres -= res.xxyz * bit_mask;\n\treturn res;\n}\nvoid main() {", +THREE.ShaderChunk.logdepthbuf_fragment,"\t#ifdef USE_LOGDEPTHBUF_EXT\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragDepthEXT );\n\t#else\n\t\tgl_FragData[ 0 ] = pack_depth( gl_FragCoord.z );\n\t#endif\n}"].join("\n")}}; +THREE.WebGLRenderer=function(a){function b(a){var b=a.geometry;a=a.material;var c=b.vertices.length;if(a.attributes){void 0===b.__webglCustomAttributesList&&(b.__webglCustomAttributesList=[]);for(var d in a.attributes){var e=a.attributes[d];if(!e.__webglInitialized||e.createUniqueBuffers){e.__webglInitialized=!0;var f=1;"v2"===e.type?f=2:"v3"===e.type?f=3:"v4"===e.type?f=4:"c"===e.type&&(f=3);e.size=f;e.array=new Float32Array(c*f);e.buffer=l.createBuffer();e.buffer.belongsToAttribute=d;e.needsUpdate= +!0}b.__webglCustomAttributesList.push(e)}}}function c(a,b){var c=b.geometry,e=a.faces3,f=3*e.length,g=1*e.length,h=3*e.length,e=d(b,a);a.__vertexArray=new Float32Array(3*f);a.__normalArray=new Float32Array(3*f);a.__colorArray=new Float32Array(3*f);a.__uvArray=new Float32Array(2*f);1Aa;Aa++)Cb=ma[Aa],Ta[Sa]=Cb.x,Ta[Sa+1]=Cb.y,Ta[Sa+2]=Cb.z,Sa+=3;else 
for(Aa=0;3>Aa;Aa++)Ta[Sa]=pa.x,Ta[Sa+1]=pa.y,Ta[Sa+2]=pa.z,Sa+=3;l.bindBuffer(l.ARRAY_BUFFER,C.__webglNormalBuffer);l.bufferData(l.ARRAY_BUFFER, +Ta,S)}if(Kc&&ua){M=0;for(ea=N.length;MAa;Aa++)Oa=hb[Aa],sb[qb]=Oa.x,sb[qb+1]=Oa.y,qb+=2;0Aa;Aa++)Qb=za[Aa],fb[rb]=Qb.x,fb[rb+1]=Qb.y,rb+=2;0h&&(f[v].counter+=1,k=f[v].hash+"_"+f[v].counter,k in r||(p={id:rc++, +faces3:[],materialIndex:v,vertices:0,numMorphTargets:m,numMorphNormals:n},r[k]=p,q.push(p)));r[k].faces3.push(t);r[k].vertices+=3}a[g]=q;d.groupsNeedUpdate=!1}a=xb[d.id];g=0;for(e=a.length;gDa;Da++)kb[Da]=!J.autoScaleCubemaps||Ob||Tb?Tb?ua.image[Da].image:ua.image[Da]:R(ua.image[Da],$c);var ka=kb[0],Zb=THREE.Math.isPowerOfTwo(ka.width)&&THREE.Math.isPowerOfTwo(ka.height),ab=Q(ua.format),Fb=Q(ua.type);F(l.TEXTURE_CUBE_MAP,ua,Zb);for(Da=0;6>Da;Da++)if(Ob)for(var gb,$b=kb[Da].mipmaps,ga=0,Xb=$b.length;ga=Oc&&console.warn("WebGLRenderer: trying to use "+a+" texture units while this GPU supports only "+ +Oc);dc+=1;return a}function x(a,b){a._modelViewMatrix.multiplyMatrices(b.matrixWorldInverse,a.matrixWorld);a._normalMatrix.getNormalMatrix(a._modelViewMatrix)}function D(a,b,c,d){a[b]=c.r*c.r*d;a[b+1]=c.g*c.g*d;a[b+2]=c.b*c.b*d}function E(a,b,c,d){a[b]=c.r*d;a[b+1]=c.g*d;a[b+2]=c.b*d}function A(a){a!==Pc&&(l.lineWidth(a),Pc=a)}function B(a,b,c){Qc!==a&&(a?l.enable(l.POLYGON_OFFSET_FILL):l.disable(l.POLYGON_OFFSET_FILL),Qc=a);!a||Rc===b&&Sc===c||(l.polygonOffset(b,c),Rc=b,Sc=c)}function F(a,b,c){c? +(l.texParameteri(a,l.TEXTURE_WRAP_S,Q(b.wrapS)),l.texParameteri(a,l.TEXTURE_WRAP_T,Q(b.wrapT)),l.texParameteri(a,l.TEXTURE_MAG_FILTER,Q(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,Q(b.minFilter))):(l.texParameteri(a,l.TEXTURE_WRAP_S,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_WRAP_T,l.CLAMP_TO_EDGE),l.texParameteri(a,l.TEXTURE_MAG_FILTER,T(b.magFilter)),l.texParameteri(a,l.TEXTURE_MIN_FILTER,T(b.minFilter)));(c=pa.get("EXT_texture_filter_anisotropic"))&&b.type!==THREE.FloatType&&(1b||a.height>b){var c=b/Math.max(a.width,a.height),d=document.createElement("canvas");d.width=Math.floor(a.width*c);d.height=Math.floor(a.height*c);d.getContext("2d").drawImage(a,0,0,a.width,a.height,0,0,d.width,d.height);console.log("THREE.WebGLRenderer:",a,"is too big ("+a.width+"x"+a.height+"). 
Resized to "+d.width+"x"+d.height+ +".");return d}return a}function H(a,b){l.bindRenderbuffer(l.RENDERBUFFER,a);b.depthBuffer&&!b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_COMPONENT16,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT,l.RENDERBUFFER,a)):b.depthBuffer&&b.stencilBuffer?(l.renderbufferStorage(l.RENDERBUFFER,l.DEPTH_STENCIL,b.width,b.height),l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a)):l.renderbufferStorage(l.RENDERBUFFER,l.RGBA4,b.width, +b.height)}function C(a){a instanceof THREE.WebGLRenderTargetCube?(l.bindTexture(l.TEXTURE_CUBE_MAP,a.__webglTexture),l.generateMipmap(l.TEXTURE_CUBE_MAP),l.bindTexture(l.TEXTURE_CUBE_MAP,null)):(l.bindTexture(l.TEXTURE_2D,a.__webglTexture),l.generateMipmap(l.TEXTURE_2D),l.bindTexture(l.TEXTURE_2D,null))}function T(a){return a===THREE.NearestFilter||a===THREE.NearestMipMapNearestFilter||a===THREE.NearestMipMapLinearFilter?l.NEAREST:l.LINEAR}function Q(a){var b;if(a===THREE.RepeatWrapping)return l.REPEAT; +if(a===THREE.ClampToEdgeWrapping)return l.CLAMP_TO_EDGE;if(a===THREE.MirroredRepeatWrapping)return l.MIRRORED_REPEAT;if(a===THREE.NearestFilter)return l.NEAREST;if(a===THREE.NearestMipMapNearestFilter)return l.NEAREST_MIPMAP_NEAREST;if(a===THREE.NearestMipMapLinearFilter)return l.NEAREST_MIPMAP_LINEAR;if(a===THREE.LinearFilter)return l.LINEAR;if(a===THREE.LinearMipMapNearestFilter)return l.LINEAR_MIPMAP_NEAREST;if(a===THREE.LinearMipMapLinearFilter)return l.LINEAR_MIPMAP_LINEAR;if(a===THREE.UnsignedByteType)return l.UNSIGNED_BYTE; +if(a===THREE.UnsignedShort4444Type)return l.UNSIGNED_SHORT_4_4_4_4;if(a===THREE.UnsignedShort5551Type)return l.UNSIGNED_SHORT_5_5_5_1;if(a===THREE.UnsignedShort565Type)return l.UNSIGNED_SHORT_5_6_5;if(a===THREE.ByteType)return l.BYTE;if(a===THREE.ShortType)return l.SHORT;if(a===THREE.UnsignedShortType)return l.UNSIGNED_SHORT;if(a===THREE.IntType)return l.INT;if(a===THREE.UnsignedIntType)return l.UNSIGNED_INT;if(a===THREE.FloatType)return l.FLOAT;if(a===THREE.AlphaFormat)return l.ALPHA;if(a===THREE.RGBFormat)return l.RGB; +if(a===THREE.RGBAFormat)return l.RGBA;if(a===THREE.LuminanceFormat)return l.LUMINANCE;if(a===THREE.LuminanceAlphaFormat)return l.LUMINANCE_ALPHA;if(a===THREE.AddEquation)return l.FUNC_ADD;if(a===THREE.SubtractEquation)return l.FUNC_SUBTRACT;if(a===THREE.ReverseSubtractEquation)return l.FUNC_REVERSE_SUBTRACT;if(a===THREE.ZeroFactor)return l.ZERO;if(a===THREE.OneFactor)return l.ONE;if(a===THREE.SrcColorFactor)return l.SRC_COLOR;if(a===THREE.OneMinusSrcColorFactor)return l.ONE_MINUS_SRC_COLOR;if(a=== +THREE.SrcAlphaFactor)return l.SRC_ALPHA;if(a===THREE.OneMinusSrcAlphaFactor)return l.ONE_MINUS_SRC_ALPHA;if(a===THREE.DstAlphaFactor)return l.DST_ALPHA;if(a===THREE.OneMinusDstAlphaFactor)return l.ONE_MINUS_DST_ALPHA;if(a===THREE.DstColorFactor)return l.DST_COLOR;if(a===THREE.OneMinusDstColorFactor)return l.ONE_MINUS_DST_COLOR;if(a===THREE.SrcAlphaSaturateFactor)return l.SRC_ALPHA_SATURATE;b=pa.get("WEBGL_compressed_texture_s3tc");if(null!==b){if(a===THREE.RGB_S3TC_DXT1_Format)return b.COMPRESSED_RGB_S3TC_DXT1_EXT; +if(a===THREE.RGBA_S3TC_DXT1_Format)return b.COMPRESSED_RGBA_S3TC_DXT1_EXT;if(a===THREE.RGBA_S3TC_DXT3_Format)return b.COMPRESSED_RGBA_S3TC_DXT3_EXT;if(a===THREE.RGBA_S3TC_DXT5_Format)return b.COMPRESSED_RGBA_S3TC_DXT5_EXT}b=pa.get("WEBGL_compressed_texture_pvrtc");if(null!==b){if(a===THREE.RGB_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGB_PVRTC_4BPPV1_IMG;if(a===THREE.RGB_PVRTC_2BPPV1_Format)return 
b.COMPRESSED_RGB_PVRTC_2BPPV1_IMG;if(a===THREE.RGBA_PVRTC_4BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_4BPPV1_IMG; +if(a===THREE.RGBA_PVRTC_2BPPV1_Format)return b.COMPRESSED_RGBA_PVRTC_2BPPV1_IMG}b=pa.get("EXT_blend_minmax");if(null!==b){if(a===THREE.MinEquation)return b.MIN_EXT;if(a===THREE.MaxEquation)return b.MAX_EXT}return 0}console.log("THREE.WebGLRenderer",THREE.REVISION);a=a||{};var O=void 0!==a.canvas?a.canvas:document.createElement("canvas"),S=void 0!==a.context?a.context:null,X=void 0!==a.precision?a.precision:"highp",Y=void 0!==a.alpha?a.alpha:!1,la=void 0!==a.depth?a.depth:!0,ma=void 0!==a.stencil? +a.stencil:!0,ya=void 0!==a.antialias?a.antialias:!1,P=void 0!==a.premultipliedAlpha?a.premultipliedAlpha:!0,Ga=void 0!==a.preserveDrawingBuffer?a.preserveDrawingBuffer:!1,Fa=void 0!==a.logarithmicDepthBuffer?a.logarithmicDepthBuffer:!1,za=new THREE.Color(0),bb=0,cb=[],ob={},jb=[],Jb=[],Ib=[],yb=[],Ra=[];this.domElement=O;this.context=null;this.devicePixelRatio=void 0!==a.devicePixelRatio?a.devicePixelRatio:void 0!==self.devicePixelRatio?self.devicePixelRatio:1;this.sortObjects=this.autoClearStencil= +this.autoClearDepth=this.autoClearColor=this.autoClear=!0;this.shadowMapEnabled=this.gammaOutput=this.gammaInput=!1;this.shadowMapType=THREE.PCFShadowMap;this.shadowMapCullFace=THREE.CullFaceFront;this.shadowMapCascade=this.shadowMapDebug=!1;this.maxMorphTargets=8;this.maxMorphNormals=4;this.autoScaleCubemaps=!0;this.info={memory:{programs:0,geometries:0,textures:0},render:{calls:0,vertices:0,faces:0,points:0}};var J=this,hb=[],tc=null,Tc=null,Kb=-1,Oa=-1,ec=null,dc=0,Lb=-1,Mb=-1,pb=-1,Nb=-1,Ob=-1, +Xb=-1,Yb=-1,nb=-1,Qc=null,Rc=null,Sc=null,Pc=null,Pb=0,kc=0,lc=O.width,mc=O.height,Uc=0,Vc=0,wb=new Uint8Array(16),ib=new Uint8Array(16),Ec=new THREE.Frustum,Ac=new THREE.Matrix4,Gc=new THREE.Matrix4,Na=new THREE.Vector3,sa=new THREE.Vector3,fc=!0,Mc={ambient:[0,0,0],directional:{length:0,colors:[],positions:[]},point:{length:0,colors:[],positions:[],distances:[]},spot:{length:0,colors:[],positions:[],distances:[],directions:[],anglesCos:[],exponents:[]},hemi:{length:0,skyColors:[],groundColors:[], +positions:[]}},l;try{var Wc={alpha:Y,depth:la,stencil:ma,antialias:ya,premultipliedAlpha:P,preserveDrawingBuffer:Ga};l=S||O.getContext("webgl",Wc)||O.getContext("experimental-webgl",Wc);if(null===l){if(null!==O.getContext("webgl"))throw"Error creating WebGL context with your selected attributes.";throw"Error creating WebGL context.";}}catch(ad){console.error(ad)}void 0===l.getShaderPrecisionFormat&&(l.getShaderPrecisionFormat=function(){return{rangeMin:1,rangeMax:1,precision:1}});var pa=new THREE.WebGLExtensions(l); +pa.get("OES_texture_float");pa.get("OES_texture_float_linear");pa.get("OES_standard_derivatives");Fa&&pa.get("EXT_frag_depth");l.clearColor(0,0,0,1);l.clearDepth(1);l.clearStencil(0);l.enable(l.DEPTH_TEST);l.depthFunc(l.LEQUAL);l.frontFace(l.CCW);l.cullFace(l.BACK);l.enable(l.CULL_FACE);l.enable(l.BLEND);l.blendEquation(l.FUNC_ADD);l.blendFunc(l.SRC_ALPHA,l.ONE_MINUS_SRC_ALPHA);l.viewport(Pb,kc,lc,mc);l.clearColor(za.r,za.g,za.b,bb);this.context=l;var Oc=l.getParameter(l.MAX_TEXTURE_IMAGE_UNITS), +bd=l.getParameter(l.MAX_VERTEX_TEXTURE_IMAGE_UNITS),cd=l.getParameter(l.MAX_TEXTURE_SIZE),$c=l.getParameter(l.MAX_CUBE_MAP_TEXTURE_SIZE),sc=0b;b++)l.deleteFramebuffer(a.__webglFramebuffer[b]),l.deleteRenderbuffer(a.__webglRenderbuffer[b]); +else l.deleteFramebuffer(a.__webglFramebuffer),l.deleteRenderbuffer(a.__webglRenderbuffer);delete a.__webglFramebuffer;delete 
a.__webglRenderbuffer}J.info.memory.textures--},Dc=function(a){a=a.target;a.removeEventListener("dispose",Dc);Cc(a)},Yc=function(a){for(var b="__webglVertexBuffer __webglNormalBuffer __webglTangentBuffer __webglColorBuffer __webglUVBuffer __webglUV2Buffer __webglSkinIndicesBuffer __webglSkinWeightsBuffer __webglFaceBuffer __webglLineBuffer __webglLineDistanceBuffer".split(" "), +c=0,d=b.length;cd.numSupportedMorphTargets?(n.sort(p),n.length=d.numSupportedMorphTargets):n.length>d.numSupportedMorphNormals?n.sort(p):0===n.length&&n.push([0, +0]);for(m=0;mf;f++){a.__webglFramebuffer[f]=l.createFramebuffer();a.__webglRenderbuffer[f]=l.createRenderbuffer();l.texImage2D(l.TEXTURE_CUBE_MAP_POSITIVE_X+f,0,d,a.width,a.height,0,d,e,null);var g=a,h=l.TEXTURE_CUBE_MAP_POSITIVE_X+f;l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer[f]);l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,h,g.__webglTexture,0);H(a.__webglRenderbuffer[f],a)}c&&l.generateMipmap(l.TEXTURE_CUBE_MAP)}else a.__webglFramebuffer= +l.createFramebuffer(),a.__webglRenderbuffer=a.shareDepthFrom?a.shareDepthFrom.__webglRenderbuffer:l.createRenderbuffer(),l.bindTexture(l.TEXTURE_2D,a.__webglTexture),F(l.TEXTURE_2D,a,c),l.texImage2D(l.TEXTURE_2D,0,d,a.width,a.height,0,d,e,null),d=l.TEXTURE_2D,l.bindFramebuffer(l.FRAMEBUFFER,a.__webglFramebuffer),l.framebufferTexture2D(l.FRAMEBUFFER,l.COLOR_ATTACHMENT0,d,a.__webglTexture,0),a.shareDepthFrom?a.depthBuffer&&!a.stencilBuffer?l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_ATTACHMENT, +l.RENDERBUFFER,a.__webglRenderbuffer):a.depthBuffer&&a.stencilBuffer&&l.framebufferRenderbuffer(l.FRAMEBUFFER,l.DEPTH_STENCIL_ATTACHMENT,l.RENDERBUFFER,a.__webglRenderbuffer):H(a.__webglRenderbuffer,a),c&&l.generateMipmap(l.TEXTURE_2D);b?l.bindTexture(l.TEXTURE_CUBE_MAP,null):l.bindTexture(l.TEXTURE_2D,null);l.bindRenderbuffer(l.RENDERBUFFER,null);l.bindFramebuffer(l.FRAMEBUFFER,null)}a?(b=b?a.__webglFramebuffer[a.activeCubeFace]:a.__webglFramebuffer,c=a.width,a=a.height,e=d=0):(b=null,c=lc,a=mc, +d=Pb,e=kc);b!==Tc&&(l.bindFramebuffer(l.FRAMEBUFFER,b),l.viewport(d,e,c,a),Tc=b);Uc=c;Vc=a};this.initMaterial=function(){console.warn("THREE.WebGLRenderer: .initMaterial() has been removed.")};this.addPrePlugin=function(){console.warn("THREE.WebGLRenderer: .addPrePlugin() has been removed.")};this.addPostPlugin=function(){console.warn("THREE.WebGLRenderer: .addPostPlugin() has been removed.")};this.updateShadowMap=function(){console.warn("THREE.WebGLRenderer: .updateShadowMap() has been removed.")}}; +THREE.WebGLRenderTarget=function(a,b,c){this.width=a;this.height=b;c=c||{};this.wrapS=void 0!==c.wrapS?c.wrapS:THREE.ClampToEdgeWrapping;this.wrapT=void 0!==c.wrapT?c.wrapT:THREE.ClampToEdgeWrapping;this.magFilter=void 0!==c.magFilter?c.magFilter:THREE.LinearFilter;this.minFilter=void 0!==c.minFilter?c.minFilter:THREE.LinearMipMapLinearFilter;this.anisotropy=void 0!==c.anisotropy?c.anisotropy:1;this.offset=new THREE.Vector2(0,0);this.repeat=new THREE.Vector2(1,1);this.format=void 0!==c.format?c.format: +THREE.RGBAFormat;this.type=void 0!==c.type?c.type:THREE.UnsignedByteType;this.depthBuffer=void 0!==c.depthBuffer?c.depthBuffer:!0;this.stencilBuffer=void 0!==c.stencilBuffer?c.stencilBuffer:!0;this.generateMipmaps=!0;this.shareDepthFrom=null}; +THREE.WebGLRenderTarget.prototype={constructor:THREE.WebGLRenderTarget,setSize:function(a,b){this.width=a;this.height=b},clone:function(){var a=new 
THREE.WebGLRenderTarget(this.width,this.height);a.wrapS=this.wrapS;a.wrapT=this.wrapT;a.magFilter=this.magFilter;a.minFilter=this.minFilter;a.anisotropy=this.anisotropy;a.offset.copy(this.offset);a.repeat.copy(this.repeat);a.format=this.format;a.type=this.type;a.depthBuffer=this.depthBuffer;a.stencilBuffer=this.stencilBuffer;a.generateMipmaps=this.generateMipmaps; +a.shareDepthFrom=this.shareDepthFrom;return a},dispose:function(){this.dispatchEvent({type:"dispose"})}};THREE.EventDispatcher.prototype.apply(THREE.WebGLRenderTarget.prototype);THREE.WebGLRenderTargetCube=function(a,b,c){THREE.WebGLRenderTarget.call(this,a,b,c);this.activeCubeFace=0};THREE.WebGLRenderTargetCube.prototype=Object.create(THREE.WebGLRenderTarget.prototype); +THREE.WebGLExtensions=function(a){var b={};this.get=function(c){if(void 0!==b[c])return b[c];var d;switch(c){case "OES_texture_float":d=a.getExtension("OES_texture_float");break;case "OES_texture_float_linear":d=a.getExtension("OES_texture_float_linear");break;case "OES_standard_derivatives":d=a.getExtension("OES_standard_derivatives");break;case "EXT_texture_filter_anisotropic":d=a.getExtension("EXT_texture_filter_anisotropic")||a.getExtension("MOZ_EXT_texture_filter_anisotropic")||a.getExtension("WEBKIT_EXT_texture_filter_anisotropic"); +break;case "WEBGL_compressed_texture_s3tc":d=a.getExtension("WEBGL_compressed_texture_s3tc")||a.getExtension("MOZ_WEBGL_compressed_texture_s3tc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_s3tc");break;case "WEBGL_compressed_texture_pvrtc":d=a.getExtension("WEBGL_compressed_texture_pvrtc")||a.getExtension("WEBKIT_WEBGL_compressed_texture_pvrtc");break;case "OES_element_index_uint":d=a.getExtension("OES_element_index_uint");break;case "EXT_blend_minmax":d=a.getExtension("EXT_blend_minmax");break; +case "EXT_frag_depth":d=a.getExtension("EXT_frag_depth")}null===d&&console.log("THREE.WebGLRenderer: "+c+" extension not supported.");return b[c]=d}}; +THREE.WebGLProgram=function(){var a=0;return function(b,c,d,e){var f=b.context,g=d.defines,h=d.__webglShader.uniforms,k=d.attributes,n=d.__webglShader.vertexShader,p=d.__webglShader.fragmentShader,q=d.index0AttributeName;void 0===q&&!0===e.morphTargets&&(q="position");var m="SHADOWMAP_TYPE_BASIC";e.shadowMapType===THREE.PCFShadowMap?m="SHADOWMAP_TYPE_PCF":e.shadowMapType===THREE.PCFSoftShadowMap&&(m="SHADOWMAP_TYPE_PCF_SOFT");var r,t;r=[];for(var s in g)t=g[s],!1!==t&&(t="#define "+s+" "+t,r.push(t)); +r=r.join("\n");g=f.createProgram();d instanceof THREE.RawShaderMaterial?b=d="":(d=["precision "+e.precision+" float;","precision "+e.precision+" int;",r,e.supportsVertexTextures?"#define VERTEX_TEXTURES":"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"","#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,"#define MAX_BONES "+ +e.maxBones,e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.skinning?"#define USE_SKINNING":"",e.useVertexTexture?"#define BONE_TEXTURE":"",e.morphTargets?"#define USE_MORPHTARGETS":"",e.morphNormals?"#define USE_MORPHNORMALS":"",e.wrapAround?"#define WRAP_AROUND": +"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define 
FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP":"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.sizeAttenuation?"#define USE_SIZEATTENUATION":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 modelMatrix;\nuniform mat4 modelViewMatrix;\nuniform mat4 projectionMatrix;\nuniform mat4 viewMatrix;\nuniform mat3 normalMatrix;\nuniform vec3 cameraPosition;\nattribute vec3 position;\nattribute vec3 normal;\nattribute vec2 uv;\nattribute vec2 uv2;\n#ifdef USE_COLOR\n\tattribute vec3 color;\n#endif\n#ifdef USE_MORPHTARGETS\n\tattribute vec3 morphTarget0;\n\tattribute vec3 morphTarget1;\n\tattribute vec3 morphTarget2;\n\tattribute vec3 morphTarget3;\n\t#ifdef USE_MORPHNORMALS\n\t\tattribute vec3 morphNormal0;\n\t\tattribute vec3 morphNormal1;\n\t\tattribute vec3 morphNormal2;\n\t\tattribute vec3 morphNormal3;\n\t#else\n\t\tattribute vec3 morphTarget4;\n\t\tattribute vec3 morphTarget5;\n\t\tattribute vec3 morphTarget6;\n\t\tattribute vec3 morphTarget7;\n\t#endif\n#endif\n#ifdef USE_SKINNING\n\tattribute vec4 skinIndex;\n\tattribute vec4 skinWeight;\n#endif\n"].join("\n"), +b=["precision "+e.precision+" float;","precision "+e.precision+" int;",e.bumpMap||e.normalMap?"#extension GL_OES_standard_derivatives : enable":"",r,"#define MAX_DIR_LIGHTS "+e.maxDirLights,"#define MAX_POINT_LIGHTS "+e.maxPointLights,"#define MAX_SPOT_LIGHTS "+e.maxSpotLights,"#define MAX_HEMI_LIGHTS "+e.maxHemiLights,"#define MAX_SHADOWS "+e.maxShadows,e.alphaTest?"#define ALPHATEST "+e.alphaTest:"",b.gammaInput?"#define GAMMA_INPUT":"",b.gammaOutput?"#define GAMMA_OUTPUT":"",e.useFog&&e.fog?"#define USE_FOG": +"",e.useFog&&e.fogExp?"#define FOG_EXP2":"",e.map?"#define USE_MAP":"",e.envMap?"#define USE_ENVMAP":"",e.lightMap?"#define USE_LIGHTMAP":"",e.bumpMap?"#define USE_BUMPMAP":"",e.normalMap?"#define USE_NORMALMAP":"",e.specularMap?"#define USE_SPECULARMAP":"",e.alphaMap?"#define USE_ALPHAMAP":"",e.vertexColors?"#define USE_COLOR":"",e.metal?"#define METAL":"",e.wrapAround?"#define WRAP_AROUND":"",e.doubleSided?"#define DOUBLE_SIDED":"",e.flipSided?"#define FLIP_SIDED":"",e.shadowMapEnabled?"#define USE_SHADOWMAP": +"",e.shadowMapEnabled?"#define "+m:"",e.shadowMapDebug?"#define SHADOWMAP_DEBUG":"",e.shadowMapCascade?"#define SHADOWMAP_CASCADE":"",e.logarithmicDepthBuffer?"#define USE_LOGDEPTHBUF":"","uniform mat4 viewMatrix;\nuniform vec3 cameraPosition;\n"].join("\n"));n=new THREE.WebGLShader(f,f.VERTEX_SHADER,d+n);p=new THREE.WebGLShader(f,f.FRAGMENT_SHADER,b+p);f.attachShader(g,n);f.attachShader(g,p);void 0!==q&&f.bindAttribLocation(g,0,q);f.linkProgram(g);!1===f.getProgramParameter(g,f.LINK_STATUS)&&(console.error("THREE.WebGLProgram: Could not initialise shader."), +console.error("gl.VALIDATE_STATUS",f.getProgramParameter(g,f.VALIDATE_STATUS)),console.error("gl.getError()",f.getError()));""!==f.getProgramInfoLog(g)&&console.warn("THREE.WebGLProgram: gl.getProgramInfoLog()",f.getProgramInfoLog(g));f.deleteShader(n);f.deleteShader(p);q="viewMatrix modelViewMatrix projectionMatrix normalMatrix modelMatrix cameraPosition morphTargetInfluences bindMatrix bindMatrixInverse".split(" ");e.useVertexTexture?(q.push("boneTexture"),q.push("boneTextureWidth"),q.push("boneTextureHeight")): +q.push("boneGlobalMatrices");e.logarithmicDepthBuffer&&q.push("logDepthBufFC");for(var u in h)q.push(u);h=q;u={};q=0;for(b=h.length;qT;T++)F[T]=new THREE.Vector3,A[T]=new 
THREE.Vector3;F=B.shadowCascadeNearZ[C];B=B.shadowCascadeFarZ[C];A[0].set(-1,-1,F);A[1].set(1,-1,F);A[2].set(-1,1,F);A[3].set(1,1,F);A[4].set(-1,-1,B);A[5].set(1,-1,B);A[6].set(-1,1,B);A[7].set(1,1,B);H.originalCamera=v;A=new THREE.Gyroscope;A.position.copy(x.shadowCascadeOffset);A.add(H);A.add(H.target);v.add(A);x.shadowCascadeArray[E]=H;console.log("Created virtualLight",H)}C= +x;F=E;B=C.shadowCascadeArray[F];B.position.copy(C.position);B.target.position.copy(C.target.position);B.lookAt(B.target);B.shadowCameraVisible=C.shadowCameraVisible;B.shadowDarkness=C.shadowDarkness;B.shadowBias=C.shadowCascadeBias[F];A=C.shadowCascadeNearZ[F];C=C.shadowCascadeFarZ[F];B=B.pointsFrustum;B[0].z=A;B[1].z=A;B[2].z=A;B[3].z=A;B[4].z=C;B[5].z=C;B[6].z=C;B[7].z=C;R[D]=H;D++}else R[D]=x,D++;u=0;for(K=R.length;uC;C++)F=B[C],F.copy(A[C]),F.unproject(E),F.applyMatrix4(D.matrixWorldInverse),F.xr.x&&(r.x=F.x),F.yr.y&&(r.y=F.y),F.zr.z&&(r.z=F.z);D.left=m.x;D.right=r.x;D.top=r.y;D.bottom=m.y;D.updateProjectionMatrix()}D=x.shadowMap;A=x.shadowMatrix;E=x.shadowCamera;E.position.setFromMatrixPosition(x.matrixWorld);t.setFromMatrixPosition(x.target.matrixWorld);E.lookAt(t);E.updateMatrixWorld();E.matrixWorldInverse.getInverse(E.matrixWorld);x.cameraHelper&& +(x.cameraHelper.visible=x.shadowCameraVisible);x.shadowCameraVisible&&x.cameraHelper.update();A.set(.5,0,0,.5,0,.5,0,.5,0,0,.5,.5,0,0,0,1);A.multiply(E.projectionMatrix);A.multiply(E.matrixWorldInverse);q.multiplyMatrices(E.projectionMatrix,E.matrixWorldInverse);p.setFromMatrix(q);a.setRenderTarget(D);a.clear();s.length=0;e(c,c,E);x=0;for(D=s.length;x 0 ) {\nfloat depth = gl_FragCoord.z / gl_FragCoord.w;\nfloat fogFactor = 0.0;\nif ( fogType == 1 ) {\nfogFactor = smoothstep( fogNear, fogFar, depth );\n} else {\nconst float LOG2 = 1.442695;\nfloat fogFactor = exp2( - fogDensity * fogDensity * depth * depth * LOG2 );\nfogFactor = 1.0 - clamp( fogFactor, 0.0, 1.0 );\n}\ngl_FragColor = mix( gl_FragColor, vec4( fogColor, gl_FragColor.w ), fogFactor );\n}\n}"].join("\n")); +w.compileShader(R);w.compileShader(H);w.attachShader(F,R);w.attachShader(F,H);w.linkProgram(F);D=F;v=w.getAttribLocation(D,"position");y=w.getAttribLocation(D,"uv");c=w.getUniformLocation(D,"uvOffset");d=w.getUniformLocation(D,"uvScale");e=w.getUniformLocation(D,"rotation");f=w.getUniformLocation(D,"scale");g=w.getUniformLocation(D,"color");h=w.getUniformLocation(D,"map");k=w.getUniformLocation(D,"opacity");n=w.getUniformLocation(D,"modelViewMatrix");p=w.getUniformLocation(D,"projectionMatrix");q= +w.getUniformLocation(D,"fogType");m=w.getUniformLocation(D,"fogDensity");r=w.getUniformLocation(D,"fogNear");t=w.getUniformLocation(D,"fogFar");s=w.getUniformLocation(D,"fogColor");u=w.getUniformLocation(D,"alphaTest");F=document.createElement("canvas");F.width=8;F.height=8;R=F.getContext("2d");R.fillStyle="white";R.fillRect(0,0,8,8);E=new THREE.Texture(F);E.needsUpdate=!0}w.useProgram(D);w.enableVertexAttribArray(v);w.enableVertexAttribArray(y);w.disable(w.CULL_FACE);w.enable(w.BLEND);w.bindBuffer(w.ARRAY_BUFFER, +K);w.vertexAttribPointer(v,2,w.FLOAT,!1,16,0);w.vertexAttribPointer(y,2,w.FLOAT,!1,16,8);w.bindBuffer(w.ELEMENT_ARRAY_BUFFER,x);w.uniformMatrix4fv(p,!1,B.projectionMatrix.elements);w.activeTexture(w.TEXTURE0);w.uniform1i(h,0);R=F=0;(H=A.fog)?(w.uniform3f(s,H.color.r,H.color.g,H.color.b),H instanceof THREE.Fog?(w.uniform1f(r,H.near),w.uniform1f(t,H.far),w.uniform1i(q,1),R=F=1):H instanceof THREE.FogExp2&&(w.uniform1f(m,H.density),w.uniform1i(q,2),R=F=2)):(w.uniform1i(q,0),R=F=0);for(var 
H=0,C=b.length;H< +C;H++){var T=b[H];T._modelViewMatrix.multiplyMatrices(B.matrixWorldInverse,T.matrixWorld);T.z=null===T.renderDepth?-T._modelViewMatrix.elements[14]:T.renderDepth}b.sort(G);for(var Q=[],H=0,C=b.length;Hq-1?0:q-1,r=q+1>e-1?e-1:q+1,t=0>p-1?0:p-1,s=p+1>d-1?d-1:p+1,u=[],v=[0,0,h[4*(q*d+p)]/255*b];u.push([-1,0,h[4*(q*d+t)]/255*b]);u.push([-1,-1,h[4*(m*d+t)]/255*b]);u.push([0,-1,h[4*(m*d+p)]/255*b]);u.push([1,-1,h[4*(m*d+s)]/255*b]);u.push([1,0,h[4*(q*d+s)]/255*b]);u.push([1,1,h[4*(r*d+s)]/255*b]);u.push([0,1,h[4*(r*d+p)]/255* +b]);u.push([-1,1,h[4*(r*d+t)]/255*b]);m=[];t=u.length;for(r=0;re)return null;var f=[],g=[],h=[],k,n,p;if(0=q--){console.log("Warning, unable to triangulate polygon!");break}k=n;e<=k&&(k=0);n=k+1;e<=n&&(n=0);p=n+1;e<=p&&(p=0);var m;a:{var r=m=void 0,t=void 0,s=void 0,u=void 0,v=void 0,y=void 0,G=void 0,w=void 0, +r=a[g[k]].x,t=a[g[k]].y,s=a[g[n]].x,u=a[g[n]].y,v=a[g[p]].x,y=a[g[p]].y;if(1E-10>(s-r)*(y-t)-(u-t)*(v-r))m=!1;else{var K=void 0,x=void 0,D=void 0,E=void 0,A=void 0,B=void 0,F=void 0,R=void 0,H=void 0,C=void 0,H=R=F=w=G=void 0,K=v-s,x=y-u,D=r-v,E=t-y,A=s-r,B=u-t;for(m=0;mk)g=d+1;else if(0b&&(b=0);1=b)return b=c[a]-b,a=this.curves[a],b=1-b/a.getLength(),a.getPointAt(b);a++}return null};THREE.CurvePath.prototype.getLength=function(){var a=this.getCurveLengths();return a[a.length-1]}; +THREE.CurvePath.prototype.getCurveLengths=function(){if(this.cacheLengths&&this.cacheLengths.length==this.curves.length)return this.cacheLengths;var a=[],b=0,c,d=this.curves.length;for(c=0;cb?b=h.x:h.xc?c=h.y:h.yd?d=h.z:h.zMath.abs(d.x-c[0].x)&&1E-10>Math.abs(d.y-c[0].y)&&c.splice(c.length-1,1);b&&c.push(c[0]);return c}; +THREE.Path.prototype.toShapes=function(a,b){function c(a){for(var b=[],c=0,d=a.length;cm&&(g=b[f],k=-k,h=b[e],m=-m),!(a.yh.y))if(a.y==g.y){if(a.x==g.x)return!0}else{e=m*(a.x-g.x)-k*(a.y-g.y);if(0==e)return!0;0>e||(d=!d)}}else if(a.y==g.y&&(h.x<=a.x&&a.x<=g.x||g.x<=a.x&&a.x<= +h.x))return!0}return d}var e=function(a){var b,c,d,e,f=[],g=new THREE.Path;b=0;for(c=a.length;bE||E>D)return[];k=n*p-k*q;if(0>k||k>D)return[]}else{if(0d?[]:k==d?f?[]:[g]:a<=d?[g,h]: +[g,n]}function e(a,b,c,d){var e=b.x-a.x,f=b.y-a.y;b=c.x-a.x;c=c.y-a.y;var g=d.x-a.x;d=d.y-a.y;a=e*c-f*b;e=e*d-f*g;return 1E-10f&&(f=d);var g=a+1;g>d&&(g=0);d=e(h[a],h[f],h[g],k[b]);if(!d)return!1; +d=k.length-1;f=b-1;0>f&&(f=d);g=b+1;g>d&&(g=0);return(d=e(k[b],k[f],k[g],h[a]))?!0:!1}function f(a,b){var c,e;for(c=0;cC){console.log("Infinite Loop! 
Holes left:"+ +n.length+", Probably Hole outside Shape!");break}for(q=B;qh;h++)n=k[h].x+":"+k[h].y, +n=p[n],void 0!==n&&(k[h]=n);return q.concat()},isClockWise:function(a){return 0>THREE.FontUtils.Triangulate.area(a)},b2p0:function(a,b){var c=1-a;return c*c*b},b2p1:function(a,b){return 2*(1-a)*a*b},b2p2:function(a,b){return a*a*b},b2:function(a,b,c,d){return this.b2p0(a,b)+this.b2p1(a,c)+this.b2p2(a,d)},b3p0:function(a,b){var c=1-a;return c*c*c*b},b3p1:function(a,b){var c=1-a;return 3*c*c*a*b},b3p2:function(a,b){return 3*(1-a)*a*a*b},b3p3:function(a,b){return a*a*a*b},b3:function(a,b,c,d,e){return this.b3p0(a, +b)+this.b3p1(a,c)+this.b3p2(a,d)+this.b3p3(a,e)}};THREE.LineCurve=function(a,b){this.v1=a;this.v2=b};THREE.LineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.LineCurve.prototype.getPoint=function(a){var b=this.v2.clone().sub(this.v1);b.multiplyScalar(a).add(this.v1);return b};THREE.LineCurve.prototype.getPointAt=function(a){return this.getPoint(a)};THREE.LineCurve.prototype.getTangent=function(a){return this.v2.clone().sub(this.v1).normalize()}; +THREE.QuadraticBezierCurve=function(a,b,c){this.v0=a;this.v1=b;this.v2=c};THREE.QuadraticBezierCurve.prototype=Object.create(THREE.Curve.prototype);THREE.QuadraticBezierCurve.prototype.getPoint=function(a){var b=new THREE.Vector2;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);return b}; +THREE.QuadraticBezierCurve.prototype.getTangent=function(a){var b=new THREE.Vector2;b.x=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Curve.Utils.tangentQuadraticBezier(a,this.v0.y,this.v1.y,this.v2.y);return b.normalize()};THREE.CubicBezierCurve=function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d};THREE.CubicBezierCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.CubicBezierCurve.prototype.getPoint=function(a){var b;b=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);return new THREE.Vector2(b,a)};THREE.CubicBezierCurve.prototype.getTangent=function(a){var b;b=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);a=THREE.Curve.Utils.tangentCubicBezier(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b=new THREE.Vector2(b,a);b.normalize();return b}; +THREE.SplineCurve=function(a){this.points=void 0==a?[]:a};THREE.SplineCurve.prototype=Object.create(THREE.Curve.prototype);THREE.SplineCurve.prototype.getPoint=function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector2;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);return c}; +THREE.EllipseCurve=function(a,b,c,d,e,f,g){this.aX=a;this.aY=b;this.xRadius=c;this.yRadius=d;this.aStartAngle=e;this.aEndAngle=f;this.aClockwise=g};THREE.EllipseCurve.prototype=Object.create(THREE.Curve.prototype); +THREE.EllipseCurve.prototype.getPoint=function(a){var b=this.aEndAngle-this.aStartAngle;0>b&&(b+=2*Math.PI);b>2*Math.PI&&(b-=2*Math.PI);a=!0===this.aClockwise?this.aEndAngle+(1-a)*(2*Math.PI-b):this.aStartAngle+a*b;b=new THREE.Vector2;b.x=this.aX+this.xRadius*Math.cos(a);b.y=this.aY+this.yRadius*Math.sin(a);return b};THREE.ArcCurve=function(a,b,c,d,e,f){THREE.EllipseCurve.call(this,a,b,c,c,d,e,f)};THREE.ArcCurve.prototype=Object.create(THREE.EllipseCurve.prototype); 
+THREE.LineCurve3=THREE.Curve.create(function(a,b){this.v1=a;this.v2=b},function(a){var b=new THREE.Vector3;b.subVectors(this.v2,this.v1);b.multiplyScalar(a);b.add(this.v1);return b});THREE.QuadraticBezierCurve3=THREE.Curve.create(function(a,b,c){this.v0=a;this.v1=b;this.v2=c},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b2(a,this.v0.x,this.v1.x,this.v2.x);b.y=THREE.Shape.Utils.b2(a,this.v0.y,this.v1.y,this.v2.y);b.z=THREE.Shape.Utils.b2(a,this.v0.z,this.v1.z,this.v2.z);return b}); +THREE.CubicBezierCurve3=THREE.Curve.create(function(a,b,c,d){this.v0=a;this.v1=b;this.v2=c;this.v3=d},function(a){var b=new THREE.Vector3;b.x=THREE.Shape.Utils.b3(a,this.v0.x,this.v1.x,this.v2.x,this.v3.x);b.y=THREE.Shape.Utils.b3(a,this.v0.y,this.v1.y,this.v2.y,this.v3.y);b.z=THREE.Shape.Utils.b3(a,this.v0.z,this.v1.z,this.v2.z,this.v3.z);return b}); +THREE.SplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-1;var c=Math.floor(a);a-=c;var d=b[0==c?c:c-1],e=b[c],f=b[c>b.length-2?b.length-1:c+1],b=b[c>b.length-3?b.length-1:c+2],c=new THREE.Vector3;c.x=THREE.Curve.Utils.interpolate(d.x,e.x,f.x,b.x,a);c.y=THREE.Curve.Utils.interpolate(d.y,e.y,f.y,b.y,a);c.z=THREE.Curve.Utils.interpolate(d.z,e.z,f.z,b.z,a);return c}); +THREE.ClosedSplineCurve3=THREE.Curve.create(function(a){this.points=void 0==a?[]:a},function(a){var b=this.points;a*=b.length-0;var c=Math.floor(a);a-=c;var c=c+(0a.hierarchy[b].keys[c].time&&(a.hierarchy[b].keys[c].time= +0),void 0!==a.hierarchy[b].keys[c].rot&&!(a.hierarchy[b].keys[c].rot instanceof THREE.Quaternion)){var d=a.hierarchy[b].keys[c].rot;a.hierarchy[b].keys[c].rot=(new THREE.Quaternion).fromArray(d)}if(a.hierarchy[b].keys.length&&void 0!==a.hierarchy[b].keys[0].morphTargets){d={};for(c=0;cd;d++){for(var e=this.keyTypes[d],f=this.data.hierarchy[a].keys[0],g=this.getNextKeyWith(e,a,1);g.timef.index;)f=g,g=this.getNextKeyWith(e,a,g.index+1);c.prevKey[e]=f;c.nextKey[e]=g}}}; +THREE.Animation.prototype.resetBlendWeights=function(){for(var a=0,b=this.hierarchy.length;aa.length-2?q:q+1;c[3]=q>a.length-3?q:q+2;q=a[c[0]];r=a[c[1]];t=a[c[2]];s=a[c[3]];c=e*e;m=e*c;d[0]=f(q[0],r[0],t[0],s[0],e,c,m);d[1]=f(q[1],r[1],t[1],s[1],e,c,m);d[2]=f(q[2],r[2],t[2],s[2],e,c,m);return d},f=function(a,b,c,d,e,f,m){a=.5*(c-a);d=.5*(d-b);return(2*(b-c)+a+d)*m+ +(-3*(b-c)-2*a-d)*f+a*e+b};return function(f){if(!1!==this.isPlaying&&(this.currentTime+=f*this.timeScale,0!==this.weight)){f=this.data.length;if(this.currentTime>f||0>this.currentTime)if(this.loop)this.currentTime%=f,0>this.currentTime&&(this.currentTime+=f),this.reset();else{this.stop();return}f=0;for(var h=this.hierarchy.length;fq;q++){var m=this.keyTypes[q],r=n.prevKey[m],t=n.nextKey[m]; +if(0this.timeScale&&r.time>=this.currentTime){r=this.data.hierarchy[f].keys[0];for(t=this.getNextKeyWith(m,f,1);t.timer.index;)r=t,t=this.getNextKeyWith(m,f,t.index+1);n.prevKey[m]=r;n.nextKey[m]=t}k.matrixAutoUpdate=!0;k.matrixWorldNeedsUpdate=!0;var s=(this.currentTime-r.time)/(t.time-r.time),u=r[m],v=t[m];0>s&&(s=0);1a&&(this.currentTime%=a);this.currentTime=Math.min(this.currentTime,a);a=0;for(var b=this.hierarchy.length;af.index;)f=g,g=e[f.index+1];d.prevKey= 
+f;d.nextKey=g}g.time>=this.currentTime?f.interpolate(g,this.currentTime):f.interpolate(g,g.time);this.data.hierarchy[a].node.updateMatrix();c.matrixWorldNeedsUpdate=!0}}}};THREE.KeyFrameAnimation.prototype.getNextKeyWith=function(a,b,c){b=this.data.hierarchy[b].keys;for(c%=b.length;cthis.duration&&(this.currentTime%=this.duration);this.currentTime=Math.min(this.currentTime,this.duration);c=this.duration/this.frames;var d=Math.floor(this.currentTime/c);d!=b&&(this.mesh.morphTargetInfluences[a]=0,this.mesh.morphTargetInfluences[b]=1,this.mesh.morphTargetInfluences[d]= +0,a=b,b=d);this.mesh.morphTargetInfluences[d]=this.currentTime%c/c;this.mesh.morphTargetInfluences[a]=1-this.mesh.morphTargetInfluences[d]}}}()}; +THREE.BoxGeometry=function(a,b,c,d,e,f){function g(a,b,c,d,e,f,g,s){var u,v=h.widthSegments,y=h.heightSegments,G=e/2,w=f/2,K=h.vertices.length;if("x"===a&&"y"===b||"y"===a&&"x"===b)u="z";else if("x"===a&&"z"===b||"z"===a&&"x"===b)u="y",y=h.depthSegments;else if("z"===a&&"y"===b||"y"===a&&"z"===b)u="x",v=h.depthSegments;var x=v+1,D=y+1,E=e/v,A=f/y,B=new THREE.Vector3;B[u]=0=d)return new THREE.Vector2(c,a);d=Math.sqrt(d/2)}else a=!1,1E-10d?-1E-10>f&&(a=!0):Math.sign(e)== +Math.sign(g)&&(a=!0),a?(c=-e,a=d,d=Math.sqrt(h)):(c=d,a=e,d=Math.sqrt(h/2));return new THREE.Vector2(c/d,a/d)}function e(a,b){var c,d;for(P=a.length;0<=--P;){c=P;d=P-1;0>d&&(d=a.length-1);for(var e=0,f=r+2*p,e=0;eMath.abs(b.y-c.y)?[new THREE.Vector2(b.x,1-b.z),new THREE.Vector2(c.x,1-c.z),new THREE.Vector2(d.x,1-d.z),new THREE.Vector2(e.x,1-e.z)]:[new THREE.Vector2(b.y,1-b.z),new THREE.Vector2(c.y,1-c.z),new THREE.Vector2(d.y, +1-d.z),new THREE.Vector2(e.y,1-e.z)]}};THREE.ShapeGeometry=function(a,b){THREE.Geometry.call(this);this.type="ShapeGeometry";!1===a instanceof Array&&(a=[a]);this.addShapeList(a,b);this.computeFaceNormals()};THREE.ShapeGeometry.prototype=Object.create(THREE.Geometry.prototype);THREE.ShapeGeometry.prototype.addShapeList=function(a,b){for(var c=0,d=a.length;cc&&1===a.x&&(a=new THREE.Vector2(a.x-1,a.y));0===b.x&&0===b.z&&(a=new THREE.Vector2(c/2/Math.PI+.5, +a.y));return a.clone()}THREE.Geometry.call(this);this.type="PolyhedronGeometry";this.parameters={vertices:a,indices:b,radius:c,detail:d};c=c||1;d=d||0;for(var k=this,n=0,p=a.length;nr&&(.2>d&&(b[0].x+=1),.2>a&&(b[1].x+=1),.2>q&&(b[2].x+=1));n=0;for(p=this.vertices.length;nc.y?this.quaternion.set(1,0,0,0):(a.set(c.z,0,-c.x).normalize(),b=Math.acos(c.y),this.quaternion.setFromAxisAngle(a,b))}}(); +THREE.ArrowHelper.prototype.setLength=function(a,b,c){void 0===b&&(b=.2*a);void 0===c&&(c=.2*b);this.line.scale.set(1,a,1);this.line.updateMatrix();this.cone.scale.set(c,b,c);this.cone.position.y=a;this.cone.updateMatrix()};THREE.ArrowHelper.prototype.setColor=function(a){this.line.material.color.set(a);this.cone.material.color.set(a)}; +THREE.BoxHelper=function(a){var b=new THREE.BufferGeometry;b.addAttribute("position",new THREE.BufferAttribute(new Float32Array(72),3));THREE.Line.call(this,b,new THREE.LineBasicMaterial({color:16776960}),THREE.LinePieces);void 0!==a&&this.update(a)};THREE.BoxHelper.prototype=Object.create(THREE.Line.prototype); +THREE.BoxHelper.prototype.update=function(a){var b=a.geometry;null===b.boundingBox&&b.computeBoundingBox();var 
c=b.boundingBox.min,b=b.boundingBox.max,d=this.geometry.attributes.position.array;d[0]=b.x;d[1]=b.y;d[2]=b.z;d[3]=c.x;d[4]=b.y;d[5]=b.z;d[6]=c.x;d[7]=b.y;d[8]=b.z;d[9]=c.x;d[10]=c.y;d[11]=b.z;d[12]=c.x;d[13]=c.y;d[14]=b.z;d[15]=b.x;d[16]=c.y;d[17]=b.z;d[18]=b.x;d[19]=c.y;d[20]=b.z;d[21]=b.x;d[22]=b.y;d[23]=b.z;d[24]=b.x;d[25]=b.y;d[26]=c.z;d[27]=c.x;d[28]=b.y;d[29]=c.z;d[30]=c.x;d[31]=b.y; +d[32]=c.z;d[33]=c.x;d[34]=c.y;d[35]=c.z;d[36]=c.x;d[37]=c.y;d[38]=c.z;d[39]=b.x;d[40]=c.y;d[41]=c.z;d[42]=b.x;d[43]=c.y;d[44]=c.z;d[45]=b.x;d[46]=b.y;d[47]=c.z;d[48]=b.x;d[49]=b.y;d[50]=b.z;d[51]=b.x;d[52]=b.y;d[53]=c.z;d[54]=c.x;d[55]=b.y;d[56]=b.z;d[57]=c.x;d[58]=b.y;d[59]=c.z;d[60]=c.x;d[61]=c.y;d[62]=b.z;d[63]=c.x;d[64]=c.y;d[65]=c.z;d[66]=b.x;d[67]=c.y;d[68]=b.z;d[69]=b.x;d[70]=c.y;d[71]=c.z;this.geometry.attributes.position.needsUpdate=!0;this.geometry.computeBoundingSphere();this.matrix=a.matrixWorld; +this.matrixAutoUpdate=!1};THREE.BoundingBoxHelper=function(a,b){var c=void 0!==b?b:8947848;this.object=a;this.box=new THREE.Box3;THREE.Mesh.call(this,new THREE.BoxGeometry(1,1,1),new THREE.MeshBasicMaterial({color:c,wireframe:!0}))};THREE.BoundingBoxHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.BoundingBoxHelper.prototype.update=function(){this.box.setFromObject(this.object);this.box.size(this.scale);this.box.center(this.position)}; +THREE.CameraHelper=function(a){function b(a,b,d){c(a,d);c(b,d)}function c(a,b){d.vertices.push(new THREE.Vector3);d.colors.push(new THREE.Color(b));void 0===f[a]&&(f[a]=[]);f[a].push(d.vertices.length-1)}var d=new THREE.Geometry,e=new THREE.LineBasicMaterial({color:16777215,vertexColors:THREE.FaceColors}),f={};b("n1","n2",16755200);b("n2","n4",16755200);b("n4","n3",16755200);b("n3","n1",16755200);b("f1","f2",16755200);b("f2","f4",16755200);b("f4","f3",16755200);b("f3","f1",16755200);b("n1","f1",16755200); +b("n2","f2",16755200);b("n3","f3",16755200);b("n4","f4",16755200);b("p","n1",16711680);b("p","n2",16711680);b("p","n3",16711680);b("p","n4",16711680);b("u1","u2",43775);b("u2","u3",43775);b("u3","u1",43775);b("c","t",16777215);b("p","c",3355443);b("cn1","cn2",3355443);b("cn3","cn4",3355443);b("cf1","cf2",3355443);b("cf3","cf4",3355443);THREE.Line.call(this,d,e,THREE.LinePieces);this.camera=a;this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1;this.pointMap=f;this.update()}; +THREE.CameraHelper.prototype=Object.create(THREE.Line.prototype); +THREE.CameraHelper.prototype.update=function(){var a,b,c=new THREE.Vector3,d=new THREE.Camera,e=function(e,g,h,k){c.set(g,h,k).unproject(d);e=b[e];if(void 0!==e)for(g=0,h=e.length;gt;t++){d[0]=r[g[t]];d[1]=r[g[(t+1)%3]];d.sort(f);var s=d.toString();void 0===e[s]?(e[s]={vert1:d[0],vert2:d[1],face1:q,face2:void 0},p++):e[s].face2=q}d=new Float32Array(6*p);f=0;for(s in e)if(g=e[s],void 0===g.face2|| +.9999>k[g.face1].normal.dot(k[g.face2].normal))p=n[g.vert1],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z,p=n[g.vert2],d[f++]=p.x,d[f++]=p.y,d[f++]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3));THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.EdgesHelper.prototype=Object.create(THREE.Line.prototype); +THREE.FaceNormalsHelper=function(a,b,c,d){this.object=a;this.size=void 0!==b?b:1;a=void 0!==c?c:16776960;d=void 0!==d?d:1;b=new THREE.Geometry;c=0;for(var e=this.object.geometry.faces.length;cb;b++)a.faces[b].color=this.colors[4>b?0:1];b=new THREE.MeshBasicMaterial({vertexColors:THREE.FaceColors,wireframe:!0});this.lightSphere=new 
THREE.Mesh(a,b);this.add(this.lightSphere); +this.update()};THREE.HemisphereLightHelper.prototype=Object.create(THREE.Object3D.prototype);THREE.HemisphereLightHelper.prototype.dispose=function(){this.lightSphere.geometry.dispose();this.lightSphere.material.dispose()}; +THREE.HemisphereLightHelper.prototype.update=function(){var a=new THREE.Vector3;return function(){this.colors[0].copy(this.light.color).multiplyScalar(this.light.intensity);this.colors[1].copy(this.light.groundColor).multiplyScalar(this.light.intensity);this.lightSphere.lookAt(a.setFromMatrixPosition(this.light.matrixWorld).negate());this.lightSphere.geometry.colorsNeedUpdate=!0}}(); +THREE.PointLightHelper=function(a,b){this.light=a;this.light.updateMatrixWorld();var c=new THREE.SphereGeometry(b,4,2),d=new THREE.MeshBasicMaterial({wireframe:!0,fog:!1});d.color.copy(this.light.color).multiplyScalar(this.light.intensity);THREE.Mesh.call(this,c,d);this.matrix=this.light.matrixWorld;this.matrixAutoUpdate=!1};THREE.PointLightHelper.prototype=Object.create(THREE.Mesh.prototype);THREE.PointLightHelper.prototype.dispose=function(){this.geometry.dispose();this.material.dispose()}; +THREE.PointLightHelper.prototype.update=function(){this.material.color.copy(this.light.color).multiplyScalar(this.light.intensity)}; +THREE.SkeletonHelper=function(a){this.bones=this.getBoneList(a);for(var b=new THREE.Geometry,c=0;cs;s++){d[0]=t[g[s]];d[1]=t[g[(s+1)%3]];d.sort(f);var u=d.toString();void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++)}d=new Float32Array(6*p);m=0;for(r=p;ms;s++)p= +k[q[2*m+s]],g=6*m+3*s,d[g+0]=p.x,d[g+1]=p.y,d[g+2]=p.z;h.addAttribute("position",new THREE.BufferAttribute(d,3))}else if(a.geometry instanceof THREE.BufferGeometry){if(void 0!==a.geometry.attributes.index){k=a.geometry.attributes.position.array;r=a.geometry.attributes.index.array;n=a.geometry.drawcalls;p=0;0===n.length&&(n=[{count:r.length,index:0,start:0}]);for(var q=new Uint32Array(2*r.length),t=0,v=n.length;ts;s++)d[0]= +g+r[m+s],d[1]=g+r[m+(s+1)%3],d.sort(f),u=d.toString(),void 0===e[u]&&(q[2*p]=d[0],q[2*p+1]=d[1],e[u]=!0,p++);d=new Float32Array(6*p);m=0;for(r=p;ms;s++)g=6*m+3*s,p=3*q[2*m+s],d[g+0]=k[p],d[g+1]=k[p+1],d[g+2]=k[p+2]}else for(k=a.geometry.attributes.position.array,p=k.length/3,q=p/3,d=new Float32Array(6*p),m=0,r=q;ms;s++)g=18*m+6*s,q=9*m+3*s,d[g+0]=k[q],d[g+1]=k[q+1],d[g+2]=k[q+2],p=9*m+(s+1)%3*3,d[g+3]=k[p],d[g+4]=k[p+1],d[g+5]=k[p+2];h.addAttribute("position",new THREE.BufferAttribute(d, +3))}THREE.Line.call(this,h,new THREE.LineBasicMaterial({color:c}),THREE.LinePieces);this.matrix=a.matrixWorld;this.matrixAutoUpdate=!1};THREE.WireframeHelper.prototype=Object.create(THREE.Line.prototype);THREE.ImmediateRenderObject=function(){THREE.Object3D.call(this);this.render=function(a){}};THREE.ImmediateRenderObject.prototype=Object.create(THREE.Object3D.prototype); +THREE.MorphBlendMesh=function(a,b){THREE.Mesh.call(this,a,b);this.animationsMap={};this.animationsList=[];var c=this.geometry.morphTargets.length;this.createAnimation("__default",0,c-1,c/1);this.setAnimationWeight("__default",1)};THREE.MorphBlendMesh.prototype=Object.create(THREE.Mesh.prototype); +THREE.MorphBlendMesh.prototype.createAnimation=function(a,b,c,d){b={startFrame:b,endFrame:c,length:c-b+1,fps:d,duration:(c-b)/d,lastFrame:0,currentFrame:0,active:!1,time:0,direction:1,weight:1,directionBackwards:!1,mirroredLoop:!1};this.animationsMap[a]=b;this.animationsList.push(b)}; +THREE.MorphBlendMesh.prototype.autoCreateAnimations=function(a){for(var 
b=/([a-z]+)_?(\d+)/,c,d={},e=this.geometry,f=0,g=e.morphTargets.length;fh.end&&(h.end=f);c||(c=k)}}for(k in d)h=d[k],this.createAnimation(k,h.start,h.end,a);this.firstAnimation=c}; +THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=this.animationsMap[a])a.direction=1,a.directionBackwards=!1};THREE.MorphBlendMesh.prototype.setAnimationDirectionBackward=function(a){if(a=this.animationsMap[a])a.direction=-1,a.directionBackwards=!0};THREE.MorphBlendMesh.prototype.setAnimationFPS=function(a,b){var c=this.animationsMap[a];c&&(c.fps=b,c.duration=(c.end-c.start)/c.fps)}; +THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b}; +THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):console.warn("animation["+a+"] undefined")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1}; +THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;bd.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g=d.weight; +f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; diff --git a/plugins/Sidebar/media-globe/world.jpg b/plugins/Sidebar/media-globe/world.jpg new file mode 100644 index 00000000..222bd939 Binary files /dev/null and b/plugins/Sidebar/media-globe/world.jpg differ diff --git a/plugins/Sidebar/media/Class.coffee b/plugins/Sidebar/media/Class.coffee new file mode 100644 index 00000000..d62ab25c --- /dev/null +++ b/plugins/Sidebar/media/Class.coffee @@ -0,0 +1,23 @@ +class Class + trace: true + + log: (args...) -> + return unless @trace + return if typeof console is 'undefined' + args.unshift("[#{@.constructor.name}]") + console.log(args...) + @ + + logStart: (name, args...) -> + return unless @trace + @logtimers or= {} + @logtimers[name] = +(new Date) + @log "#{name}", args..., "(started)" if args.length > 0 + @ + + logEnd: (name, args...) 
-> + ms = +(new Date)-@logtimers[name] + @log "#{name}", args..., "(Done in #{ms}ms)" + @ + +window.Class = Class \ No newline at end of file diff --git a/plugins/Sidebar/media/RateLimit.coffee b/plugins/Sidebar/media/RateLimit.coffee new file mode 100644 index 00000000..17c67433 --- /dev/null +++ b/plugins/Sidebar/media/RateLimit.coffee @@ -0,0 +1,14 @@ +limits = {} +call_after_interval = {} +window.RateLimit = (interval, fn) -> + if not limits[fn] + call_after_interval[fn] = false + fn() # First call is not delayed + limits[fn] = setTimeout (-> + if call_after_interval[fn] + fn() + delete limits[fn] + delete call_after_interval[fn] + ), interval + else # Called within iterval, delay the call + call_after_interval[fn] = true diff --git a/plugins/Sidebar/media/Scrollable.js b/plugins/Sidebar/media/Scrollable.js new file mode 100644 index 00000000..689a5719 --- /dev/null +++ b/plugins/Sidebar/media/Scrollable.js @@ -0,0 +1,91 @@ +/* via http://jsfiddle.net/elGrecode/00dgurnn/ */ + +window.initScrollable = function () { + + var scrollContainer = document.querySelector('.scrollable'), + scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'), + scrollContent = document.querySelector('.scrollable .content'), + contentPosition = 0, + scrollerBeingDragged = false, + scroller, + topPosition, + scrollerHeight; + + function calculateScrollerHeight() { + // *Calculation of how tall scroller should be + var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight; + if (visibleRatio == 1) + scroller.style.display = "none"; + else + scroller.style.display = "block"; + return visibleRatio * scrollContainer.offsetHeight; + } + + function moveScroller(evt) { + // Move Scroll bar to top offset + var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight; + topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box + scroller.style.top = topPosition + 'px'; + } + + function startDrag(evt) { + normalizedPosition = evt.pageY; + contentPosition = scrollContentWrapper.scrollTop; + scrollerBeingDragged = true; + window.addEventListener('mousemove', scrollBarScroll); + return false; + } + + function stopDrag(evt) { + scrollerBeingDragged = false; + window.removeEventListener('mousemove', scrollBarScroll); + } + + function scrollBarScroll(evt) { + if (scrollerBeingDragged === true) { + evt.preventDefault(); + var mouseDifferential = evt.pageY - normalizedPosition; + var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight); + scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent; + } + } + + function updateHeight() { + scrollerHeight = calculateScrollerHeight() - 10; + scroller.style.height = scrollerHeight + 'px'; + } + + function createScroller() { + // *Creates scroller element and appends to '.scrollable' div + // create scroller element + scroller = document.createElement("div"); + scroller.className = 'scroller'; + + // determine how big scroller should be based on content + scrollerHeight = calculateScrollerHeight() - 10; + + if (scrollerHeight / scrollContainer.offsetHeight < 1) { + // *If there is a need to have scroll bar based on content size + scroller.style.height = scrollerHeight + 'px'; + + // append scroller to scrollContainer div + scrollContainer.appendChild(scroller); + + // show scroll path divot + scrollContainer.className += ' showScroll'; + + // attach 
related draggable listeners + scroller.addEventListener('mousedown', startDrag); + window.addEventListener('mouseup', stopDrag); + } + + } + + createScroller(); + + + // *** Listeners *** + scrollContentWrapper.addEventListener('scroll', moveScroller); + + return updateHeight; +}; \ No newline at end of file diff --git a/plugins/Sidebar/media/Scrollbable.css b/plugins/Sidebar/media/Scrollbable.css new file mode 100644 index 00000000..b11faea0 --- /dev/null +++ b/plugins/Sidebar/media/Scrollbable.css @@ -0,0 +1,44 @@ +.scrollable { + overflow: hidden; +} + +.scrollable.showScroll::after { + position: absolute; + content: ''; + top: 5%; + right: 7px; + height: 90%; + width: 3px; + background: rgba(224, 224, 255, .3); +} + +.scrollable .content-wrapper { + width: 100%; + height: 100%; + padding-right: 50%; + overflow-y: scroll; +} +.scroller { + margin-top: 5px; + z-index: 5; + cursor: pointer; + position: absolute; + width: 7px; + border-radius: 5px; + background: #151515; + top: 0px; + left: 395px; + -webkit-transition: top .08s; + -moz-transition: top .08s; + -ms-transition: top .08s; + -o-transition: top .08s; + transition: top .08s; +} +.scroller { + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} \ No newline at end of file diff --git a/plugins/Sidebar/media/Sidebar.coffee b/plugins/Sidebar/media/Sidebar.coffee new file mode 100644 index 00000000..db0bfb5e --- /dev/null +++ b/plugins/Sidebar/media/Sidebar.coffee @@ -0,0 +1,427 @@ +class Sidebar extends Class + constructor: -> + @tag = null + @container = null + @opened = false + @width = 410 + @fixbutton = $(".fixbutton") + @fixbutton_addx = 0 + @fixbutton_initx = 0 + @fixbutton_targetx = 0 + @page_width = $(window).width() + @frame = $("#inner-iframe") + @initFixbutton() + @dragStarted = 0 + @globe = null + @preload_html = null + + @original_set_site_info = wrapper.setSiteInfo # We going to override this, save the original + + # Start in opened state for debugging + if false + @startDrag() + @moved() + @fixbutton_targetx = @fixbutton_initx - @width + @stopDrag() + + + initFixbutton: -> + ### + @fixbutton.on "mousedown touchstart", (e) => + if not @opened + @logStart("Preloading") + wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) => + @logEnd("Preloading") + @preload_html = res + ### + + # Detect dragging + @fixbutton.on "mousedown touchstart", (e) => + if e.button > 0 # Right or middle click + return + e.preventDefault() + + # Disable previous listeners + @fixbutton.off "click touchstop touchcancel" + @fixbutton.off "mousemove touchmove" + + # Make sure its not a click + @dragStarted = (+ new Date) + @fixbutton.one "mousemove touchmove", (e) => + mousex = e.pageX + if not mousex + mousex = e.originalEvent.touches[0].pageX + + @fixbutton_addx = @fixbutton.offset().left-mousex + @startDrag() + @fixbutton.parent().on "click touchstop touchcancel", (e) => + @stopDrag() + @resized() + $(window).on "resize", @resized + + resized: => + @page_width = $(window).width() + @fixbutton_initx = @page_width - 75 # Initial x position + if @opened + @fixbutton.css + left: @fixbutton_initx - @width + else + @fixbutton.css + left: @fixbutton_initx + + # Start dragging the fixbutton + startDrag: -> + @log "startDrag" + @fixbutton_targetx = @fixbutton_initx # Fallback x position + + @fixbutton.addClass("dragging") + + # Fullscreen drag bg to capture mouse events over iframe + $("
<div class='drag-bg'></div>
    ").appendTo(document.body) + + # IE position wrap fix + if navigator.userAgent.indexOf('MSIE') != -1 or navigator.appVersion.indexOf('Trident/') > 0 + @fixbutton.css("pointer-events", "none") + + # Don't go to homepage + @fixbutton.one "click", (e) => + @stopDrag() + @fixbutton.removeClass("dragging") + if Math.abs(@fixbutton.offset().left - @fixbutton_initx) > 5 + # If moved more than some pixel the button then don't go to homepage + e.preventDefault() + + # Animate drag + @fixbutton.parents().on "mousemove touchmove", @animDrag + @fixbutton.parents().on "mousemove touchmove" ,@waitMove + + # Stop dragging listener + @fixbutton.parents().on "mouseup touchstop touchend touchcancel", (e) => + e.preventDefault() + @stopDrag() + + + # Wait for moving the fixbutton + waitMove: (e) => + if Math.abs(@fixbutton.offset().left - @fixbutton_targetx) > 10 and (+ new Date)-@dragStarted > 100 + @moved() + @fixbutton.parents().off "mousemove touchmove" ,@waitMove + + moved: -> + @log "Moved" + @createHtmltag() + $(document.body).css("perspective", "1000px").addClass("body-sidebar") + $(window).off "resize" + $(window).on "resize", => + $(document.body).css "height", $(window).height() + @scrollable() + @resized() + $(window).trigger "resize" + + # Override setsiteinfo to catch changes + wrapper.setSiteInfo = (site_info) => + @setSiteInfo(site_info) + @original_set_site_info.apply(wrapper, arguments) + + # Preload world.jpg + img = new Image(); + img.src = "/uimedia/globe/world.jpg"; + + setSiteInfo: (site_info) -> + RateLimit 1500, => + @updateHtmlTag() + RateLimit 30000, => + @displayGlobe() + + # Create the sidebar html tag + createHtmltag: -> + @when_loaded = $.Deferred() + if not @container + @container = $(""" + + """) + @container.appendTo(document.body) + @tag = @container.find(".sidebar") + @updateHtmlTag() + @scrollable = window.initScrollable() + + + updateHtmlTag: -> + if @preload_html + @setHtmlTag(@preload_html) + @preload_html = null + else + wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag + + setHtmlTag: (res) => + if @tag.find(".content").children().length == 0 # First update + @log "Creating content" + @container.addClass("loaded") + morphdom(@tag.find(".content")[0], '
    <div class="content">'+res+'</div>
    ') + # @scrollable() + @when_loaded.resolve() + + else # Not first update, patch the html to keep unchanged dom elements + @log "Patching content" + morphdom @tag.find(".content")[0], '
    <div class="content">'+res+'</div>
    ', { + onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state + if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0 + return false + else + return true + } + + + animDrag: (e) => + mousex = e.pageX + if not mousex + mousex = e.originalEvent.touches[0].pageX + + overdrag = @fixbutton_initx-@width-mousex + if overdrag > 0 # Overdragged + overdrag_percent = 1+overdrag/300 + mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent) + targetx = @fixbutton_initx-mousex-@fixbutton_addx + + @fixbutton[0].style.left = (mousex+@fixbutton_addx)+"px" + + if @tag + @tag[0].style.transform = "translateX(#{0-targetx}px)" + + # Check if opened + if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9) + @fixbutton_targetx = @fixbutton_initx - @width # Make it opened + else + @fixbutton_targetx = @fixbutton_initx + + + # Stop dragging the fixbutton + stopDrag: -> + @fixbutton.parents().off "mousemove touchmove" + @fixbutton.off "mousemove touchmove" + @fixbutton.css("pointer-events", "") + $(".drag-bg").remove() + if not @fixbutton.hasClass("dragging") + return + @fixbutton.removeClass("dragging") + + # Move back to initial position + if @fixbutton_targetx != @fixbutton.offset().left + # Animate fixbutton + @fixbutton.stop().animate {"left": @fixbutton_targetx}, 500, "easeOutBack", => + # Switch back to auto align + if @fixbutton_targetx == @fixbutton_initx # Closed + @fixbutton.css("left", "auto") + else # Opened + @fixbutton.css("left", @fixbutton_targetx) + + $(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status + + # Animate sidebar and iframe + if @fixbutton_targetx == @fixbutton_initx + # Closed + targetx = 0 + @opened = false + else + # Opened + targetx = @width + if not @opened + @when_loaded.done => + @onOpened() + @opened = true + + # Revent sidebar transitions + if @tag + @tag.css("transition", "0.4s ease-out") + @tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, => + @tag.css("transition", "") + if not @opened + @container.remove() + @container = null + @tag.remove() + @tag = null + + # Revert body transformations + @log "stopdrag", "opened:", @opened + if not @opened + @onClosed() + + + onOpened: -> + @log "Opened" + @scrollable() + + # Re-calculate height when site admin opened or closed + @tag.find("#checkbox-owned").off("click").on "click", => + setTimeout (=> + @scrollable() + ), 300 + + # Site limit button + @tag.find("#button-sitelimit").off("click").on "click", => + wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), (res) => + if res == "ok" + wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000 + @updateHtmlTag() + return false + + # Database reload + @tag.find("#button-dbreload").off("click").on "click", => + wrapper.ws.cmd "dbReload", [], => + wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000 + @updateHtmlTag() + return false + + # Database rebuild + @tag.find("#button-dbrebuild").off("click").on "click", => + wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...." 
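+ # Every handler in this section uses the same websocket RPC helper:
+ # wrapper.ws.cmd(command, params, callback) sends the command to the
+ # UiServer and invokes the callback with its response. A minimal sketch
+ # of the pattern (reusing dbReload, which is wired up above):
+ #   wrapper.ws.cmd "dbReload", [], (res) => @log "dbReload returned", res
+ # The dbRebuild call below notifies first, then refreshes the sidebar
+ # HTML from its completion callback.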
+ wrapper.ws.cmd "dbRebuild", [], => + wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000 + @updateHtmlTag() + return false + + # Update site + @tag.find("#button-update").off("click").on "click", => + @tag.find("#button-update").addClass("loading") + wrapper.ws.cmd "siteUpdate", wrapper.site_info.address, => + wrapper.notifications.add "done-updated", "done", "Site updated!", 5000 + @tag.find("#button-update").removeClass("loading") + return false + + # Pause site + @tag.find("#button-pause").off("click").on "click", => + @tag.find("#button-pause").addClass("hidden") + wrapper.ws.cmd "sitePause", wrapper.site_info.address + return false + + # Resume site + @tag.find("#button-resume").off("click").on "click", => + @tag.find("#button-resume").addClass("hidden") + wrapper.ws.cmd "siteResume", wrapper.site_info.address + return false + + # Delete site + @tag.find("#button-delete").off("click").on "click", => + wrapper.displayConfirm "Are you sure?", "Delete this site", => + @tag.find("#button-delete").addClass("loading") + wrapper.ws.cmd "siteDelete", wrapper.site_info.address, -> + document.location = $(".fixbutton-bg").attr("href") + return false + + # Owned checkbox + @tag.find("#checkbox-owned").off("click").on "click", => + wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] + + # Owned checkbox + @tag.find("#checkbox-autodownloadoptional").off("click").on "click", => + wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")] + + # Change identity button + @tag.find("#button-identity").off("click").on "click", => + wrapper.ws.cmd "certSelect" + return false + + # Owned checkbox + @tag.find("#checkbox-owned").off("click").on "click", => + wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] + + # Save settings + @tag.find("#button-settings").off("click").on "click", => + wrapper.ws.cmd "fileGet", "content.json", (res) => + data = JSON.parse(res) + data["title"] = $("#settings-title").val() + data["description"] = $("#settings-description").val() + json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t'))) + wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) => + if res != "ok" # fileWrite failed + wrapper.notifications.add "file-write", "error", "File write error: #{res}" + else + wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000 + @updateHtmlTag() + return false + + # Sign content.json + @tag.find("#button-sign").off("click").on "click", => + inner_path = @tag.find("#input-contents").val() + + if wrapper.site_info.privatekey + # Privatekey stored in users.json + wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: inner_path, update_changed_files: true}, (res) => + wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + + else + # Ask the user for privatekey + wrapper.displayPrompt "Enter your private key:", "password", "Sign", (privatekey) => # Prompt the private key + wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) => + if res == "ok" + wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + + return false + + # Publish content.json + @tag.find("#button-publish").off("click").on "click", => + inner_path = @tag.find("#input-contents").val() + @tag.find("#button-publish").addClass "loading" + wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, => + 
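+ # Note: "sign": false because content.json is signed separately by the
+ # #button-sign handler above; sitePublish here only announces the already
+ # signed content to peers (signing at publish time is presumably what
+ # "sign": true would do, but only the false path is used in this file).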
@tag.find("#button-publish").removeClass "loading" + + @loadGlobe() + + + onClosed: -> + $(window).off "resize" + $(window).on "resize", @resized + $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) => + if e.target == document.body + $(document.body).css("height", "auto").css("perspective", "").css("transition", "").off transitionEnd + @unloadGlobe() + + # We dont need site info anymore + wrapper.setSiteInfo = @original_set_site_info + + + loadGlobe: => + console.log "loadGlobe", @tag.find(".globe").hasClass("loading") + if @tag.find(".globe").hasClass("loading") + setTimeout (=> + if typeof(DAT) == "undefined" # Globe script not loaded, do it first + $.getScript("/uimedia/globe/all.js", @displayGlobe) + else + @displayGlobe() + ), 600 + + + displayGlobe: => + img = new Image(); + img.src = "/uimedia/globe/world.jpg"; + img.onload = => + wrapper.ws.cmd "sidebarGetPeers", [], (globe_data) => + if @globe + @globe.scene.remove(@globe.points) + @globe.addData( globe_data, {format: 'magnitude', name: "hello", animated: false} ) + @globe.createPoints() + else if typeof(DAT) != "undefined" + try + @globe = new DAT.Globe( @tag.find(".globe")[0], {"imgDir": "/uimedia/globe/"} ) + @globe.addData( globe_data, {format: 'magnitude', name: "hello"} ) + @globe.createPoints() + @globe.animate() + catch e + console.log "WebGL error", e + @tag?.find(".globe").addClass("error").text("WebGL not supported") + + @tag?.find(".globe").removeClass("loading") + + + unloadGlobe: => + if not @globe + return false + @globe.unload() + @globe = null + + +setTimeout ( -> + window.sidebar = new Sidebar() +), 500 +window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend' diff --git a/plugins/Sidebar/media/Sidebar.css b/plugins/Sidebar/media/Sidebar.css new file mode 100644 index 00000000..bf915baa --- /dev/null +++ b/plugins/Sidebar/media/Sidebar.css @@ -0,0 +1,124 @@ +.drag-bg { width: 100%; height: 100%; position: fixed; } +.fixbutton.dragging { cursor: -webkit-grabbing; } +.fixbutton-bg:active { cursor: -webkit-grabbing; } + + +.body-sidebar { background-color: #666 !important; } +#inner-iframe { transition: 0.3s ease-in-out; transform-origin: left; outline: 1px solid transparent; } +.body-sidebar iframe { transform: rotateY(5deg); opacity: 0.8; pointer-events: none } /* translateX(-200px) scale(0.95)*/ + +/* SIDEBAR */ + +.sidebar-container { width: 100%; height: 100%; overflow: hidden; position: fixed; } +.sidebar { background-color: #212121; position: fixed; backface-visibility: hidden; right: -1200px; height: 100%; width: 1200px; } /*box-shadow: inset 0px 0px 10px #000*/ +.sidebar .content { margin: 30px; font-family: "Segoe UI Light", "Segoe UI", "Helvetica Neue"; color: white; width: 375px; height: 300px; font-weight: 200; transition: all 1s; opacity: 0 } +.sidebar-container.loaded .content { opacity: 1; transform: none } +.sidebar h1, .sidebar h2 { font-weight: lighter; } +.sidebar .button { margin: 0px; display: inline-block; transition: all 0.3s; box-sizing: border-box; max-width: 260px } +.sidebar .button.hidden { padding: 0px; max-width: 0px; opacity: 0; pointer-events: none } +.sidebar #button-delete { background-color: transparent; border: 1px solid #333; color: #AAA; margin-left: 10px } +.sidebar #button-delete:hover { border: 1px solid #666; color: white } + +.sidebar .flex { display: flex } +.sidebar .flex .input.text, .sidebar .flex input.text { width: 100%; } +.sidebar .flex .button { margin-left: 4px; white-space: 
nowrap; } + +/* FIELDS */ + +.sidebar .fields { padding: 0px; list-style-type: none; width: 355px; } +.sidebar .fields > li, .sidebar .fields .settings-owned > li { margin-bottom: 30px } +.sidebar .fields > li:after, .sidebar .fields .settings-owned > li:after { clear: both; content: ''; display: block } +.sidebar .fields label { + font-family: Consolas, monospace; text-transform: uppercase; font-size: 13px; color: #ACACAC; display: inline-block; margin-bottom: 10px; + vertical-align: text-bottom; margin-right: 10px; width: 100% +} +.sidebar .fields label small { font-weight: normal; color: white; text-transform: none; } +.sidebar .fields .text { background-color: black; border: 0px; padding: 10px; color: white; border-radius: 3px; width: 260px; font-family: Consolas, monospace; } +.sidebar .fields .text.long { width: 330px; font-size: 72%; } +.sidebar .fields .disabled { color: #AAA; background-color: #3B3B3B; } +.sidebar .fields .text-num { width: 30px; text-align: right; padding-right: 30px; } +.sidebar .fields .text-post { color: white; font-family: Consolas, monospace; display: inline-block; font-size: 13px; margin-left: -25px; width: 25px; } + +/* Select */ +.sidebar .fields select { + width: 225px; background-color: #3B3B3B; color: white; font-family: Consolas, monospace; appearance: none; + padding: 5px; padding-right: 25px; border: 0px; border-radius: 3px; height: 35px; vertical-align: 1px; box-shadow: 0px 1px 2px rgba(0,0,0,0.5); +} +.sidebar .fields .select-down { margin-left: -39px; width: 34px; display: inline-block; transform: rotateZ(90deg); height: 35px; vertical-align: -8px; pointer-events: none; font-weight: bold } + +/* Checkbox */ +.sidebar .fields .checkbox { width: 50px; height: 24px; position: relative; z-index: 999; opacity: 0; } +.sidebar .fields .checkbox-skin { background-color: #CCC; width: 50px; height: 24px; border-radius: 15px; transition: all 0.3s ease-in-out; display: inline-block; margin-left: -59px; } +.sidebar .fields .checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; border-radius: 100%; margin-top: 2px; margin-left: 2px; + transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); +} +.sidebar .fields .checkbox:checked ~ .checkbox-skin:before { margin-left: 27px; } +.sidebar .fields .checkbox:checked ~ .checkbox-skin { background-color: #2ECC71; } + +/* Fake input */ +.sidebar .input { font-size: 13px; width: 250px; display: inline-block; overflow: hidden; text-overflow: ellipsis; vertical-align: top } + +/* GRAPH */ + +.graph { padding: 0px; list-style-type: none; width: 351px; background-color: black; height: 10px; border-radius: 8px; overflow: hidden; position: relative; font-size: 0 } +.graph li { height: 100%; position: absolute; transition: all 0.3s; } +.graph-stacked { white-space: nowrap; } +.graph-stacked li { position: static; display: inline-block; height: 20px } + +.graph-legend { padding: 0px; list-style-type: none; margin-top: 13px; font-family: Consolas, "Andale Mono", monospace; font-size: 13px; text-transform: capitalize; } +.sidebar .graph-legend li { margin: 0px; margin-top: 5px; margin-left: 0px; width: 160px; float: left; position: relative; } +.sidebar .graph-legend li:nth-child(odd) { margin-right: 29px } +.graph-legend span { position: absolute; } +.graph-legend b { text-align: right; display: inline-block; width: 50px; float: right; font-weight: normal; } +.graph-legend li:before { content: '\2022'; font-size: 23px; line-height: 0px; vertical-align: 
-3px; margin-right: 5px; } + +.filelist { font-size: 12px; font-family: monospace; margin: 0px; padding: 0px; list-style-type: none; line-height: 1.5em; } +.filelist li:before { content: '\2022'; font-size: 11px; line-height: 0px; vertical-align: 0px; margin-right: 5px; color: #FFBE00; } +.filelist li { overflow: hidden; text-overflow: ellipsis; } + +/* COLORS */ + +.back-green { background-color: #2ECC71 } +.color-green:before { color: #2ECC71 } +.back-blue { background-color: #3BAFDA } +.color-blue:before { color: #3BAFDA } +.back-darkblue { background-color: #156fb7 } +.color-darkblue:before { color: #156fb7 } +.back-purple { background-color: #B10DC9 } +.color-purple:before { color: #B10DC9 } +.back-yellow { background-color: #FFDC00 } +.color-yellow:before { color: #FFDC00 } +.back-orange { background-color: #FF9800 } +.color-orange:before { color: #FF9800 } +.back-gray { background-color: #ECF0F1 } +.color-gray:before { color: #ECF0F1 } +.back-black { background-color: #34495E } +.color-black:before { color: #34495E } +.back-red { background-color: #5E4934 } +.color-red:before { color: #5E4934 } +.back-gray { background-color: #9e9e9e } +.color-gray:before { color: #9e9e9e } +.back-white { background-color: #EEE } +.color-white:before { color: #EEE } +.back-red { background-color: #E91E63 } +.color-red:before { color: #E91E63 } + + +/* Settings owned */ + +.owned-title { float: left } +#checkbox-owned { margin-bottom: 25px; margin-top: 26px; margin-left: 11px; } +.settings-owned { clear: both } +#checkbox-owned ~ .settings-owned { opacity: 0; max-height: 0px; transition: all 0.3s linear; overflow: hidden } +#checkbox-owned:checked ~ .settings-owned { opacity: 1; max-height: 400px } + +/* Globe */ +.globe { width: 360px; height: 360px } +.globe.loading { background: url(/uimedia/img/loading-circle.gif) center center no-repeat } +.globe.error { text-align: center; padding-top: 156px; box-sizing: border-box; opacity: 0.2; } + +/* Sign publish */ +.contents { background-color: #3B3B3B; color: white; padding: 7px 10px; font-family: Consolas; font-size: 11px; display: inline-block; margin-bottom: 6px; } +.contents a { color: white } +.contents a:active { background-color: #6B6B6B } \ No newline at end of file diff --git a/plugins/Sidebar/media/all.css b/plugins/Sidebar/media/all.css new file mode 100644 index 00000000..f28bd0d4 --- /dev/null +++ b/plugins/Sidebar/media/all.css @@ -0,0 +1,178 @@ + + +/* ---- plugins/Sidebar/media/Scrollbable.css ---- */ + + +.scrollable { + overflow: hidden; +} + +.scrollable.showScroll::after { + position: absolute; + content: ''; + top: 5%; + right: 7px; + height: 90%; + width: 3px; + background: rgba(224, 224, 255, .3); +} + +.scrollable .content-wrapper { + width: 100%; + height: 100%; + padding-right: 50%; + overflow-y: scroll; +} +.scroller { + margin-top: 5px; + z-index: 5; + cursor: pointer; + position: absolute; + width: 7px; + -webkit-border-radius: 5px; -moz-border-radius: 5px; -o-border-radius: 5px; -ms-border-radius: 5px; border-radius: 5px ; + background: #151515; + top: 0px; + left: 395px; + -webkit-transition: top .08s; + -moz-transition: top .08s; + -ms-transition: top .08s; + -o-transition: top .08s; + -webkit-transition: top .08s; -moz-transition: top .08s; -o-transition: top .08s; -ms-transition: top .08s; transition: top .08s ; +} +.scroller { + -webkit-touch-callout: none; + -webkit-user-select: none; + -khtml-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + + +/* ---- 
plugins/Sidebar/media/Sidebar.css ---- */ + + +.drag-bg { width: 100%; height: 100%; position: fixed; } +.fixbutton.dragging { cursor: -webkit-grabbing; } +.fixbutton-bg:active { cursor: -webkit-grabbing; } + + +.body-sidebar { background-color: #666 !important; } +#inner-iframe { -webkit-transition: 0.3s ease-in-out; -moz-transition: 0.3s ease-in-out; -o-transition: 0.3s ease-in-out; -ms-transition: 0.3s ease-in-out; transition: 0.3s ease-in-out ; transform-origin: left; outline: 1px solid transparent; } +.body-sidebar iframe { -webkit-transform: rotateY(5deg); -moz-transform: rotateY(5deg); -o-transform: rotateY(5deg); -ms-transform: rotateY(5deg); transform: rotateY(5deg) ; opacity: 0.8; pointer-events: none } /* translateX(-200px) scale(0.95)*/ + +/* SIDEBAR */ + +.sidebar-container { width: 100%; height: 100%; overflow: hidden; position: fixed; } +.sidebar { background-color: #212121; position: fixed; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; right: -1200px; height: 100%; width: 1200px; } /*box-shadow: inset 0px 0px 10px #000*/ +.sidebar .content { margin: 30px; font-family: "Segoe UI Light", "Segoe UI", "Helvetica Neue"; color: white; width: 375px; height: 300px; font-weight: 200; -webkit-transition: all 1s; -moz-transition: all 1s; -o-transition: all 1s; -ms-transition: all 1s; transition: all 1s ; opacity: 0 } +.sidebar-container.loaded .content { opacity: 1; -webkit-transform: none ; -moz-transform: none ; -o-transform: none ; -ms-transform: none ; transform: none } +.sidebar h1, .sidebar h2 { font-weight: lighter; } +.sidebar .button { margin: 0px; display: inline-block; -webkit-transition: all 0.3s; -moz-transition: all 0.3s; -o-transition: all 0.3s; -ms-transition: all 0.3s; transition: all 0.3s ; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; max-width: 260px } +.sidebar .button.hidden { padding: 0px; max-width: 0px; opacity: 0; pointer-events: none } +.sidebar #button-delete { background-color: transparent; border: 1px solid #333; color: #AAA; margin-left: 10px } +.sidebar #button-delete:hover { border: 1px solid #666; color: white } + +.sidebar .flex { display: flex } +.sidebar .flex .input.text, .sidebar .flex input.text { width: 100%; } +.sidebar .flex .button { margin-left: 4px; white-space: nowrap; } + +/* FIELDS */ + +.sidebar .fields { padding: 0px; list-style-type: none; width: 355px; } +.sidebar .fields > li, .sidebar .fields .settings-owned > li { margin-bottom: 30px } +.sidebar .fields > li:after, .sidebar .fields .settings-owned > li:after { clear: both; content: ''; display: block } +.sidebar .fields label { + font-family: Consolas, monospace; text-transform: uppercase; font-size: 13px; color: #ACACAC; display: inline-block; margin-bottom: 10px; + vertical-align: text-bottom; margin-right: 10px; width: 100% +} +.sidebar .fields label small { font-weight: normal; color: white; text-transform: none; } +.sidebar .fields .text { background-color: black; border: 0px; padding: 10px; color: white; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; width: 260px; font-family: Consolas, monospace; } +.sidebar .fields .text.long { width: 330px; font-size: 72%; } +.sidebar .fields .disabled { color: #AAA; background-color: #3B3B3B; } +.sidebar .fields .text-num { width: 30px; text-align: 
right; padding-right: 30px; } +.sidebar .fields .text-post { color: white; font-family: Consolas, monospace; display: inline-block; font-size: 13px; margin-left: -25px; width: 25px; } + +/* Select */ +.sidebar .fields select { + width: 225px; background-color: #3B3B3B; color: white; font-family: Consolas, monospace; -webkit-appearance: none; -moz-appearance: none; -o-appearance: none; -ms-appearance: none; appearance: none ; + padding: 5px; padding-right: 25px; border: 0px; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; height: 35px; vertical-align: 1px; -webkit-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -moz-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -o-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); -ms-box-shadow: 0px 1px 2px rgba(0,0,0,0.5); box-shadow: 0px 1px 2px rgba(0,0,0,0.5) ; +} +.sidebar .fields .select-down { margin-left: -39px; width: 34px; display: inline-block; -webkit-transform: rotateZ(90deg); -moz-transform: rotateZ(90deg); -o-transform: rotateZ(90deg); -ms-transform: rotateZ(90deg); transform: rotateZ(90deg) ; height: 35px; vertical-align: -8px; pointer-events: none; font-weight: bold } + +/* Checkbox */ +.sidebar .fields .checkbox { width: 50px; height: 24px; position: relative; z-index: 999; opacity: 0; } +.sidebar .fields .checkbox-skin { background-color: #CCC; width: 50px; height: 24px; -webkit-border-radius: 15px; -moz-border-radius: 15px; -o-border-radius: 15px; -ms-border-radius: 15px; border-radius: 15px ; -webkit-transition: all 0.3s ease-in-out; -moz-transition: all 0.3s ease-in-out; -o-transition: all 0.3s ease-in-out; -ms-transition: all 0.3s ease-in-out; transition: all 0.3s ease-in-out ; display: inline-block; margin-left: -59px; } +.sidebar .fields .checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; -webkit-border-radius: 100%; -moz-border-radius: 100%; -o-border-radius: 100%; -ms-border-radius: 100%; border-radius: 100% ; margin-top: 2px; margin-left: 2px; + -webkit-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -moz-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -o-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -ms-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86) ; +} +.sidebar .fields .checkbox:checked ~ .checkbox-skin:before { margin-left: 27px; } +.sidebar .fields .checkbox:checked ~ .checkbox-skin { background-color: #2ECC71; } + +/* Fake input */ +.sidebar .input { font-size: 13px; width: 250px; display: inline-block; overflow: hidden; text-overflow: ellipsis; vertical-align: top } + +/* GRAPH */ + +.graph { padding: 0px; list-style-type: none; width: 351px; background-color: black; height: 10px; -webkit-border-radius: 8px; -moz-border-radius: 8px; -o-border-radius: 8px; -ms-border-radius: 8px; border-radius: 8px ; overflow: hidden; position: relative; font-size: 0 } +.graph li { height: 100%; position: absolute; -webkit-transition: all 0.3s; -moz-transition: all 0.3s; -o-transition: all 0.3s; -ms-transition: all 0.3s; transition: all 0.3s ; } +.graph-stacked { white-space: nowrap; } +.graph-stacked li { position: static; display: inline-block; height: 20px } + +.graph-legend { padding: 0px; list-style-type: none; margin-top: 13px; font-family: Consolas, "Andale Mono", monospace; font-size: 13px; text-transform: capitalize; } +.sidebar .graph-legend li { margin: 0px; margin-top: 5px; margin-left: 
0px; width: 160px; float: left; position: relative; } +.sidebar .graph-legend li:nth-child(odd) { margin-right: 29px } +.graph-legend span { position: absolute; } +.graph-legend b { text-align: right; display: inline-block; width: 50px; float: right; font-weight: normal; } +.graph-legend li:before { content: '\2022'; font-size: 23px; line-height: 0px; vertical-align: -3px; margin-right: 5px; } + +.filelist { font-size: 12px; font-family: monospace; margin: 0px; padding: 0px; list-style-type: none; line-height: 1.5em; } +.filelist li:before { content: '\2022'; font-size: 11px; line-height: 0px; vertical-align: 0px; margin-right: 5px; color: #FFBE00; } +.filelist li { overflow: hidden; text-overflow: ellipsis; } + +/* COLORS */ + +.back-green { background-color: #2ECC71 } +.color-green:before { color: #2ECC71 } +.back-blue { background-color: #3BAFDA } +.color-blue:before { color: #3BAFDA } +.back-darkblue { background-color: #156fb7 } +.color-darkblue:before { color: #156fb7 } +.back-purple { background-color: #B10DC9 } +.color-purple:before { color: #B10DC9 } +.back-yellow { background-color: #FFDC00 } +.color-yellow:before { color: #FFDC00 } +.back-orange { background-color: #FF9800 } +.color-orange:before { color: #FF9800 } +.back-gray { background-color: #ECF0F1 } +.color-gray:before { color: #ECF0F1 } +.back-black { background-color: #34495E } +.color-black:before { color: #34495E } +.back-red { background-color: #5E4934 } +.color-red:before { color: #5E4934 } +.back-gray { background-color: #9e9e9e } +.color-gray:before { color: #9e9e9e } +.back-white { background-color: #EEE } +.color-white:before { color: #EEE } +.back-red { background-color: #E91E63 } +.color-red:before { color: #E91E63 } + + +/* Settings owned */ + +.owned-title { float: left } +#checkbox-owned { margin-bottom: 25px; margin-top: 26px; margin-left: 11px; } +.settings-owned { clear: both } +#checkbox-owned ~ .settings-owned { opacity: 0; max-height: 0px; -webkit-transition: all 0.3s linear; -moz-transition: all 0.3s linear; -o-transition: all 0.3s linear; -ms-transition: all 0.3s linear; transition: all 0.3s linear ; overflow: hidden } +#checkbox-owned:checked ~ .settings-owned { opacity: 1; max-height: 400px } + +/* Globe */ +.globe { width: 360px; height: 360px } +.globe.loading { background: url(/uimedia/img/loading-circle.gif) center center no-repeat } +.globe.error { text-align: center; padding-top: 156px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; opacity: 0.2; } + +/* Sign publish */ +.contents { background-color: #3B3B3B; color: white; padding: 7px 10px; font-family: Consolas; font-size: 11px; display: inline-block; margin-bottom: 6px; } +.contents a { color: white } +.contents a:active { background-color: #6B6B6B } \ No newline at end of file diff --git a/plugins/Sidebar/media/all.js b/plugins/Sidebar/media/all.js new file mode 100644 index 00000000..bf05eb49 --- /dev/null +++ b/plugins/Sidebar/media/all.js @@ -0,0 +1,1067 @@ + + +/* ---- plugins/Sidebar/media/Class.coffee ---- */ + + +(function() { + var Class, + __slice = [].slice; + + Class = (function() { + function Class() {} + + Class.prototype.trace = true; + + Class.prototype.log = function() { + var args; + args = 1 <= arguments.length ? 
__slice.call(arguments, 0) : []; + if (!this.trace) { + return; + } + if (typeof console === 'undefined') { + return; + } + args.unshift("[" + this.constructor.name + "]"); + console.log.apply(console, args); + return this; + }; + + Class.prototype.logStart = function() { + var args, name; + name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : []; + if (!this.trace) { + return; + } + this.logtimers || (this.logtimers = {}); + this.logtimers[name] = +(new Date); + if (args.length > 0) { + this.log.apply(this, ["" + name].concat(__slice.call(args), ["(started)"])); + } + return this; + }; + + Class.prototype.logEnd = function() { + var args, ms, name; + name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : []; + ms = +(new Date) - this.logtimers[name]; + this.log.apply(this, ["" + name].concat(__slice.call(args), ["(Done in " + ms + "ms)"])); + return this; + }; + + return Class; + + })(); + + window.Class = Class; + +}).call(this); + + +/* ---- plugins/Sidebar/media/RateLimit.coffee ---- */ + + +(function() { + var call_after_interval, limits; + + limits = {}; + + call_after_interval = {}; + + window.RateLimit = function(interval, fn) { + if (!limits[fn]) { + call_after_interval[fn] = false; + fn(); + return limits[fn] = setTimeout((function() { + if (call_after_interval[fn]) { + fn(); + } + delete limits[fn]; + return delete call_after_interval[fn]; + }), interval); + } else { + return call_after_interval[fn] = true; + } + }; + +}).call(this); + + +/* ---- plugins/Sidebar/media/Scrollable.js ---- */ + + +/* via http://jsfiddle.net/elGrecode/00dgurnn/ */ + +window.initScrollable = function () { + + var scrollContainer = document.querySelector('.scrollable'), + scrollContentWrapper = document.querySelector('.scrollable .content-wrapper'), + scrollContent = document.querySelector('.scrollable .content'), + contentPosition = 0, + scrollerBeingDragged = false, + scroller, + topPosition, + scrollerHeight; + + function calculateScrollerHeight() { + // *Calculation of how tall scroller should be + var visibleRatio = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight; + if (visibleRatio == 1) + scroller.style.display = "none"; + else + scroller.style.display = "block"; + return visibleRatio * scrollContainer.offsetHeight; + } + + function moveScroller(evt) { + // Move Scroll bar to top offset + var scrollPercentage = evt.target.scrollTop / scrollContentWrapper.scrollHeight; + topPosition = scrollPercentage * (scrollContainer.offsetHeight - 5); // 5px arbitrary offset so scroll bar doesn't move too far beyond content wrapper bounding box + scroller.style.top = topPosition + 'px'; + } + + function startDrag(evt) { + normalizedPosition = evt.pageY; + contentPosition = scrollContentWrapper.scrollTop; + scrollerBeingDragged = true; + window.addEventListener('mousemove', scrollBarScroll); + return false; + } + + function stopDrag(evt) { + scrollerBeingDragged = false; + window.removeEventListener('mousemove', scrollBarScroll); + } + + function scrollBarScroll(evt) { + if (scrollerBeingDragged === true) { + evt.preventDefault(); + var mouseDifferential = evt.pageY - normalizedPosition; + var scrollEquivalent = mouseDifferential * (scrollContentWrapper.scrollHeight / scrollContainer.offsetHeight); + scrollContentWrapper.scrollTop = contentPosition + scrollEquivalent; + } + } + + function updateHeight() { + scrollerHeight = calculateScrollerHeight() - 10; + scroller.style.height = scrollerHeight + 'px'; + } + + function createScroller() { 
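+ // The scroller is sized proportionally to the visible fraction of the
+ // content (see calculateScrollerHeight above):
+ //   visibleRatio   = scrollContainer.offsetHeight / scrollContentWrapper.scrollHeight
+ //   scrollerHeight = visibleRatio * scrollContainer.offsetHeight - 10
+ // e.g. a 300px container showing 900px of content yields a 90px scroller
+ // (300 / 900 * 300 - 10); when visibleRatio is 1 the scroller is hidden.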
+ // *Creates scroller element and appends to '.scrollable' div + // create scroller element + scroller = document.createElement("div"); + scroller.className = 'scroller'; + + // determine how big scroller should be based on content + scrollerHeight = calculateScrollerHeight() - 10; + + if (scrollerHeight / scrollContainer.offsetHeight < 1) { + // *If there is a need to have scroll bar based on content size + scroller.style.height = scrollerHeight + 'px'; + + // append scroller to scrollContainer div + scrollContainer.appendChild(scroller); + + // show scroll path divot + scrollContainer.className += ' showScroll'; + + // attach related draggable listeners + scroller.addEventListener('mousedown', startDrag); + window.addEventListener('mouseup', stopDrag); + } + + } + + createScroller(); + + + // *** Listeners *** + scrollContentWrapper.addEventListener('scroll', moveScroller); + + return updateHeight; +}; + + +/* ---- plugins/Sidebar/media/Sidebar.coffee ---- */ + + +(function() { + var Sidebar, + bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; }, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + + Sidebar = (function(superClass) { + extend(Sidebar, superClass); + + function Sidebar() { + this.unloadGlobe = bind(this.unloadGlobe, this); + this.displayGlobe = bind(this.displayGlobe, this); + this.loadGlobe = bind(this.loadGlobe, this); + this.animDrag = bind(this.animDrag, this); + this.setHtmlTag = bind(this.setHtmlTag, this); + this.waitMove = bind(this.waitMove, this); + this.resized = bind(this.resized, this); + this.tag = null; + this.container = null; + this.opened = false; + this.width = 410; + this.fixbutton = $(".fixbutton"); + this.fixbutton_addx = 0; + this.fixbutton_initx = 0; + this.fixbutton_targetx = 0; + this.page_width = $(window).width(); + this.frame = $("#inner-iframe"); + this.initFixbutton(); + this.dragStarted = 0; + this.globe = null; + this.preload_html = null; + this.original_set_site_info = wrapper.setSiteInfo; + if (false) { + this.startDrag(); + this.moved(); + this.fixbutton_targetx = this.fixbutton_initx - this.width; + this.stopDrag(); + } + } + + Sidebar.prototype.initFixbutton = function() { + + /* + @fixbutton.on "mousedown touchstart", (e) => + if not @opened + @logStart("Preloading") + wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) => + @logEnd("Preloading") + @preload_html = res + */ + this.fixbutton.on("mousedown touchstart", (function(_this) { + return function(e) { + if (e.button > 0) { + return; + } + e.preventDefault(); + _this.fixbutton.off("click touchstop touchcancel"); + _this.fixbutton.off("mousemove touchmove"); + _this.dragStarted = +(new Date); + return _this.fixbutton.one("mousemove touchmove", function(e) { + var mousex; + mousex = e.pageX; + if (!mousex) { + mousex = e.originalEvent.touches[0].pageX; + } + _this.fixbutton_addx = _this.fixbutton.offset().left - mousex; + return _this.startDrag(); + }); + }; + })(this)); + this.fixbutton.parent().on("click touchstop touchcancel", (function(_this) { + return function(e) { + return _this.stopDrag(); + }; + })(this)); + this.resized(); + return $(window).on("resize", this.resized); + }; + + Sidebar.prototype.resized = function() { + this.page_width = $(window).width(); + this.fixbutton_initx = 
this.page_width - 75; + if (this.opened) { + return this.fixbutton.css({ + left: this.fixbutton_initx - this.width + }); + } else { + return this.fixbutton.css({ + left: this.fixbutton_initx + }); + } + }; + + Sidebar.prototype.startDrag = function() { + this.log("startDrag"); + this.fixbutton_targetx = this.fixbutton_initx; + this.fixbutton.addClass("dragging"); + $("
<div class='drag-bg'></div>
    ").appendTo(document.body); + if (navigator.userAgent.indexOf('MSIE') !== -1 || navigator.appVersion.indexOf('Trident/') > 0) { + this.fixbutton.css("pointer-events", "none"); + } + this.fixbutton.one("click", (function(_this) { + return function(e) { + _this.stopDrag(); + _this.fixbutton.removeClass("dragging"); + if (Math.abs(_this.fixbutton.offset().left - _this.fixbutton_initx) > 5) { + return e.preventDefault(); + } + }; + })(this)); + this.fixbutton.parents().on("mousemove touchmove", this.animDrag); + this.fixbutton.parents().on("mousemove touchmove", this.waitMove); + return this.fixbutton.parents().on("mouseup touchstop touchend touchcancel", (function(_this) { + return function(e) { + e.preventDefault(); + return _this.stopDrag(); + }; + })(this)); + }; + + Sidebar.prototype.waitMove = function(e) { + if (Math.abs(this.fixbutton.offset().left - this.fixbutton_targetx) > 10 && (+(new Date)) - this.dragStarted > 100) { + this.moved(); + return this.fixbutton.parents().off("mousemove touchmove", this.waitMove); + } + }; + + Sidebar.prototype.moved = function() { + var img; + this.log("Moved"); + this.createHtmltag(); + $(document.body).css("perspective", "1000px").addClass("body-sidebar"); + $(window).off("resize"); + $(window).on("resize", (function(_this) { + return function() { + $(document.body).css("height", $(window).height()); + _this.scrollable(); + return _this.resized(); + }; + })(this)); + $(window).trigger("resize"); + wrapper.setSiteInfo = (function(_this) { + return function(site_info) { + _this.setSiteInfo(site_info); + return _this.original_set_site_info.apply(wrapper, arguments); + }; + })(this); + img = new Image(); + return img.src = "/uimedia/globe/world.jpg"; + }; + + Sidebar.prototype.setSiteInfo = function(site_info) { + RateLimit(1500, (function(_this) { + return function() { + return _this.updateHtmlTag(); + }; + })(this)); + return RateLimit(30000, (function(_this) { + return function() { + return _this.displayGlobe(); + }; + })(this)); + }; + + Sidebar.prototype.createHtmltag = function() { + this.when_loaded = $.Deferred(); + if (!this.container) { + this.container = $("
    <div class=\"sidebar-container\"><div class=\"sidebar scrollable\"><div class=\"content-wrapper\"><div class=\"content\"></div></div></div></div>\n
    "); + this.container.appendTo(document.body); + this.tag = this.container.find(".sidebar"); + this.updateHtmlTag(); + return this.scrollable = window.initScrollable(); + } + }; + + Sidebar.prototype.updateHtmlTag = function() { + if (this.preload_html) { + this.setHtmlTag(this.preload_html); + return this.preload_html = null; + } else { + return wrapper.ws.cmd("sidebarGetHtmlTag", {}, this.setHtmlTag); + } + }; + + Sidebar.prototype.setHtmlTag = function(res) { + if (this.tag.find(".content").children().length === 0) { + this.log("Creating content"); + this.container.addClass("loaded"); + morphdom(this.tag.find(".content")[0], '
    <div class="content">' + res + '</div>
    '); + return this.when_loaded.resolve(); + } else { + this.log("Patching content"); + return morphdom(this.tag.find(".content")[0], '
    <div class="content">' + res + '</div>
    ', { + onBeforeMorphEl: function(from_el, to_el) { + if (from_el.className === "globe" || from_el.className.indexOf("noupdate") >= 0) { + return false; + } else { + return true; + } + } + }); + } + }; + + Sidebar.prototype.animDrag = function(e) { + var mousex, overdrag, overdrag_percent, targetx; + mousex = e.pageX; + if (!mousex) { + mousex = e.originalEvent.touches[0].pageX; + } + overdrag = this.fixbutton_initx - this.width - mousex; + if (overdrag > 0) { + overdrag_percent = 1 + overdrag / 300; + mousex = (mousex + (this.fixbutton_initx - this.width) * overdrag_percent) / (1 + overdrag_percent); + } + targetx = this.fixbutton_initx - mousex - this.fixbutton_addx; + this.fixbutton[0].style.left = (mousex + this.fixbutton_addx) + "px"; + if (this.tag) { + this.tag[0].style.transform = "translateX(" + (0 - targetx) + "px)"; + } + if ((!this.opened && targetx > this.width / 3) || (this.opened && targetx > this.width * 0.9)) { + return this.fixbutton_targetx = this.fixbutton_initx - this.width; + } else { + return this.fixbutton_targetx = this.fixbutton_initx; + } + }; + + Sidebar.prototype.stopDrag = function() { + var targetx; + this.fixbutton.parents().off("mousemove touchmove"); + this.fixbutton.off("mousemove touchmove"); + this.fixbutton.css("pointer-events", ""); + $(".drag-bg").remove(); + if (!this.fixbutton.hasClass("dragging")) { + return; + } + this.fixbutton.removeClass("dragging"); + if (this.fixbutton_targetx !== this.fixbutton.offset().left) { + this.fixbutton.stop().animate({ + "left": this.fixbutton_targetx + }, 500, "easeOutBack", (function(_this) { + return function() { + if (_this.fixbutton_targetx === _this.fixbutton_initx) { + _this.fixbutton.css("left", "auto"); + } else { + _this.fixbutton.css("left", _this.fixbutton_targetx); + } + return $(".fixbutton-bg").trigger("mouseout"); + }; + })(this)); + if (this.fixbutton_targetx === this.fixbutton_initx) { + targetx = 0; + this.opened = false; + } else { + targetx = this.width; + if (!this.opened) { + this.when_loaded.done((function(_this) { + return function() { + return _this.onOpened(); + }; + })(this)); + } + this.opened = true; + } + if (this.tag) { + this.tag.css("transition", "0.4s ease-out"); + this.tag.css("transform", "translateX(-" + targetx + "px)").one(transitionEnd, (function(_this) { + return function() { + _this.tag.css("transition", ""); + if (!_this.opened) { + _this.container.remove(); + _this.container = null; + _this.tag.remove(); + return _this.tag = null; + } + }; + })(this)); + } + this.log("stopdrag", "opened:", this.opened); + if (!this.opened) { + return this.onClosed(); + } + } + }; + + Sidebar.prototype.onOpened = function() { + this.log("Opened"); + this.scrollable(); + this.tag.find("#checkbox-owned").off("click").on("click", (function(_this) { + return function() { + return setTimeout((function() { + return _this.scrollable(); + }), 300); + }; + })(this)); + this.tag.find("#button-sitelimit").off("click").on("click", (function(_this) { + return function() { + wrapper.ws.cmd("siteSetLimit", $("#input-sitelimit").val(), function(res) { + if (res === "ok") { + wrapper.notifications.add("done-sitelimit", "done", "Site storage limit modified!", 5000); + } + return _this.updateHtmlTag(); + }); + return false; + }; + })(this)); + this.tag.find("#button-dbreload").off("click").on("click", (function(_this) { + return function() { + wrapper.ws.cmd("dbReload", [], function() { + wrapper.notifications.add("done-dbreload", "done", "Database schema reloaded!", 5000); + return 
_this.updateHtmlTag(); + }); + return false; + }; + })(this)); + this.tag.find("#button-dbrebuild").off("click").on("click", (function(_this) { + return function() { + wrapper.notifications.add("done-dbrebuild", "info", "Database rebuilding...."); + wrapper.ws.cmd("dbRebuild", [], function() { + wrapper.notifications.add("done-dbrebuild", "done", "Database rebuilt!", 5000); + return _this.updateHtmlTag(); + }); + return false; + }; + })(this)); + this.tag.find("#button-update").off("click").on("click", (function(_this) { + return function() { + _this.tag.find("#button-update").addClass("loading"); + wrapper.ws.cmd("siteUpdate", wrapper.site_info.address, function() { + wrapper.notifications.add("done-updated", "done", "Site updated!", 5000); + return _this.tag.find("#button-update").removeClass("loading"); + }); + return false; + }; + })(this)); + this.tag.find("#button-pause").off("click").on("click", (function(_this) { + return function() { + _this.tag.find("#button-pause").addClass("hidden"); + wrapper.ws.cmd("sitePause", wrapper.site_info.address); + return false; + }; + })(this)); + this.tag.find("#button-resume").off("click").on("click", (function(_this) { + return function() { + _this.tag.find("#button-resume").addClass("hidden"); + wrapper.ws.cmd("siteResume", wrapper.site_info.address); + return false; + }; + })(this)); + this.tag.find("#button-delete").off("click").on("click", (function(_this) { + return function() { + wrapper.displayConfirm("Are you sure?", "Delete this site", function() { + _this.tag.find("#button-delete").addClass("loading"); + return wrapper.ws.cmd("siteDelete", wrapper.site_info.address, function() { + return document.location = $(".fixbutton-bg").attr("href"); + }); + }); + return false; + }; + })(this)); + this.tag.find("#checkbox-owned").off("click").on("click", (function(_this) { + return function() { + return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]); + }; + })(this)); + this.tag.find("#checkbox-autodownloadoptional").off("click").on("click", (function(_this) { + return function() { + return wrapper.ws.cmd("siteSetAutodownloadoptional", [_this.tag.find("#checkbox-autodownloadoptional").is(":checked")]); + }; + })(this)); + this.tag.find("#button-identity").off("click").on("click", (function(_this) { + return function() { + wrapper.ws.cmd("certSelect"); + return false; + }; + })(this)); + this.tag.find("#checkbox-owned").off("click").on("click", (function(_this) { + return function() { + return wrapper.ws.cmd("siteSetOwned", [_this.tag.find("#checkbox-owned").is(":checked")]); + }; + })(this)); + this.tag.find("#button-settings").off("click").on("click", (function(_this) { + return function() { + wrapper.ws.cmd("fileGet", "content.json", function(res) { + var data, json_raw; + data = JSON.parse(res); + data["title"] = $("#settings-title").val(); + data["description"] = $("#settings-description").val(); + json_raw = unescape(encodeURIComponent(JSON.stringify(data, void 0, '\t'))); + return wrapper.ws.cmd("fileWrite", ["content.json", btoa(json_raw), true], function(res) { + if (res !== "ok") { + return wrapper.notifications.add("file-write", "error", "File write error: " + res); + } else { + wrapper.notifications.add("file-write", "done", "Site settings saved!", 5000); + return _this.updateHtmlTag(); + } + }); + }); + return false; + }; + })(this)); + this.tag.find("#button-sign").off("click").on("click", (function(_this) { + return function() { + var inner_path; + inner_path = 
_this.tag.find("#input-contents").val(); + if (wrapper.site_info.privatekey) { + wrapper.ws.cmd("siteSign", { + privatekey: "stored", + inner_path: inner_path, + update_changed_files: true + }, function(res) { + return wrapper.notifications.add("sign", "done", inner_path + " Signed!", 5000); + }); + } else { + wrapper.displayPrompt("Enter your private key:", "password", "Sign", function(privatekey) { + return wrapper.ws.cmd("siteSign", { + privatekey: privatekey, + inner_path: inner_path, + update_changed_files: true + }, function(res) { + if (res === "ok") { + return wrapper.notifications.add("sign", "done", inner_path + " Signed!", 5000); + } + }); + }); + } + return false; + }; + })(this)); + this.tag.find("#button-publish").off("click").on("click", (function(_this) { + return function() { + var inner_path; + inner_path = _this.tag.find("#input-contents").val(); + _this.tag.find("#button-publish").addClass("loading"); + return wrapper.ws.cmd("sitePublish", { + "inner_path": inner_path, + "sign": false + }, function() { + return _this.tag.find("#button-publish").removeClass("loading"); + }); + }; + })(this)); + return this.loadGlobe(); + }; + + Sidebar.prototype.onClosed = function() { + $(window).off("resize"); + $(window).on("resize", this.resized); + $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on(transitionEnd, (function(_this) { + return function(e) { + if (e.target === document.body) { + $(document.body).css("height", "auto").css("perspective", "").css("transition", "").off(transitionEnd); + return _this.unloadGlobe(); + } + }; + })(this)); + return wrapper.setSiteInfo = this.original_set_site_info; + }; + + Sidebar.prototype.loadGlobe = function() { + console.log("loadGlobe", this.tag.find(".globe").hasClass("loading")); + if (this.tag.find(".globe").hasClass("loading")) { + return setTimeout(((function(_this) { + return function() { + if (typeof DAT === "undefined") { + return $.getScript("/uimedia/globe/all.js", _this.displayGlobe); + } else { + return _this.displayGlobe(); + } + }; + })(this)), 600); + } + }; + + Sidebar.prototype.displayGlobe = function() { + var img; + img = new Image(); + img.src = "/uimedia/globe/world.jpg"; + return img.onload = (function(_this) { + return function() { + return wrapper.ws.cmd("sidebarGetPeers", [], function(globe_data) { + var e, ref, ref1; + if (_this.globe) { + _this.globe.scene.remove(_this.globe.points); + _this.globe.addData(globe_data, { + format: 'magnitude', + name: "hello", + animated: false + }); + _this.globe.createPoints(); + } else if (typeof DAT !== "undefined") { + try { + _this.globe = new DAT.Globe(_this.tag.find(".globe")[0], { + "imgDir": "/uimedia/globe/" + }); + _this.globe.addData(globe_data, { + format: 'magnitude', + name: "hello" + }); + _this.globe.createPoints(); + _this.globe.animate(); + } catch (error) { + e = error; + console.log("WebGL error", e); + if ((ref = _this.tag) != null) { + ref.find(".globe").addClass("error").text("WebGL not supported"); + } + } + } + return (ref1 = _this.tag) != null ? 
ref1.find(".globe").removeClass("loading") : void 0; + }); + }; + })(this); + }; + + Sidebar.prototype.unloadGlobe = function() { + if (!this.globe) { + return false; + } + this.globe.unload(); + return this.globe = null; + }; + + return Sidebar; + + })(Class); + + setTimeout((function() { + return window.sidebar = new Sidebar(); + }), 500); + + window.transitionEnd = 'transitionend webkitTransitionEnd oTransitionEnd otransitionend'; + +}).call(this); + + + +/* ---- plugins/Sidebar/media/morphdom.js ---- */ + + +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.morphdom = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o element + * since it sets the initial value. Changing the "value" + * attribute without changing the "value" property will have + * no effect since it is only used to the set the initial value. + * Similar for the "checked" attribute. + */ + /*INPUT: function(fromEl, toEl) { + fromEl.checked = toEl.checked; + fromEl.value = toEl.value; + + if (!toEl.hasAttribute('checked')) { + fromEl.removeAttribute('checked'); + } + + if (!toEl.hasAttribute('value')) { + fromEl.removeAttribute('value'); + } + }*/ +}; + +function noop() {} + +/** + * Loop over all of the attributes on the target node and make sure the + * original DOM node has the same attributes. If an attribute + * found on the original node is not on the new node then remove it from + * the original node + * @param {HTMLElement} fromNode + * @param {HTMLElement} toNode + */ +function morphAttrs(fromNode, toNode) { + var attrs = toNode.attributes; + var i; + var attr; + var attrName; + var attrValue; + var foundAttrs = {}; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + attrValue = attr.value; + foundAttrs[attrName] = true; + + if (fromNode.getAttribute(attrName) !== attrValue) { + fromNode.setAttribute(attrName, attrValue); + } + } + } + + // Delete any extra attributes found on the original DOM element that weren't + // found on the target element. 
+ attrs = fromNode.attributes; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + if (!foundAttrs.hasOwnProperty(attrName)) { + fromNode.removeAttribute(attrName); + } + } + } +} + +/** + * Copies the children of one DOM element to another DOM element + */ +function moveChildren(from, to) { + var curChild = from.firstChild; + while(curChild) { + var nextChild = curChild.nextSibling; + to.appendChild(curChild); + curChild = nextChild; + } + return to; +} + +function morphdom(fromNode, toNode, options) { + if (!options) { + options = {}; + } + + if (typeof toNode === 'string') { + var newBodyEl = document.createElement('body'); + newBodyEl.innerHTML = toNode; + toNode = newBodyEl.childNodes[0]; + } + + var savedEls = {}; // Used to save off DOM elements with IDs + var unmatchedEls = {}; + var onNodeDiscarded = options.onNodeDiscarded || noop; + var onBeforeMorphEl = options.onBeforeMorphEl || noop; + var onBeforeMorphElChildren = options.onBeforeMorphElChildren || noop; + + function removeNodeHelper(node, nestedInSavedEl) { + var id = node.id; + // If the node has an ID then save it off since we will want + // to reuse it in case the target DOM tree has a DOM element + // with the same ID + if (id) { + savedEls[id] = node; + } else if (!nestedInSavedEl) { + // If we are not nested in a saved element then we know that this node has been + // completely discarded and will not exist in the final DOM. + onNodeDiscarded(node); + } + + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + removeNodeHelper(curChild, nestedInSavedEl || id); + curChild = curChild.nextSibling; + } + } + } + + function walkDiscardedChildNodes(node) { + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + + + if (!curChild.id) { + // We only want to handle nodes that don't have an ID to avoid double + // walking the same saved element. 
+ + onNodeDiscarded(curChild); + + // Walk recursively + walkDiscardedChildNodes(curChild); + } + + curChild = curChild.nextSibling; + } + } + } + + function removeNode(node, parentNode, alreadyVisited) { + parentNode.removeChild(node); + + if (alreadyVisited) { + if (!node.id) { + onNodeDiscarded(node); + walkDiscardedChildNodes(node); + } + } else { + removeNodeHelper(node); + } + } + + function morphEl(fromNode, toNode, alreadyVisited) { + if (toNode.id) { + // If an element with an ID is being morphed then it is will be in the final + // DOM so clear it out of the saved elements collection + delete savedEls[toNode.id]; + } + + if (onBeforeMorphEl(fromNode, toNode) === false) { + return; + } + + morphAttrs(fromNode, toNode); + + if (onBeforeMorphElChildren(fromNode, toNode) === false) { + return; + } + + var curToNodeChild = toNode.firstChild; + var curFromNodeChild = fromNode.firstChild; + var curToNodeId; + + var fromNextSibling; + var toNextSibling; + var savedEl; + var unmatchedEl; + + outer: while(curToNodeChild) { + toNextSibling = curToNodeChild.nextSibling; + curToNodeId = curToNodeChild.id; + + while(curFromNodeChild) { + var curFromNodeId = curFromNodeChild.id; + fromNextSibling = curFromNodeChild.nextSibling; + + if (!alreadyVisited) { + if (curFromNodeId && (unmatchedEl = unmatchedEls[curFromNodeId])) { + unmatchedEl.parentNode.replaceChild(curFromNodeChild, unmatchedEl); + morphEl(curFromNodeChild, unmatchedEl, alreadyVisited); + curFromNodeChild = fromNextSibling; + continue; + } + } + + var curFromNodeType = curFromNodeChild.nodeType; + + if (curFromNodeType === curToNodeChild.nodeType) { + var isCompatible = false; + + if (curFromNodeType === 1) { // Both nodes being compared are Element nodes + if (curFromNodeChild.tagName === curToNodeChild.tagName) { + // We have compatible DOM elements + if (curFromNodeId || curToNodeId) { + // If either DOM element has an ID then we handle + // those differently since we want to match up + // by ID + if (curToNodeId === curFromNodeId) { + isCompatible = true; + } + } else { + isCompatible = true; + } + } + + if (isCompatible) { + // We found compatible DOM elements so add a + // task to morph the compatible DOM elements + morphEl(curFromNodeChild, curToNodeChild, alreadyVisited); + } + } else if (curFromNodeType === 3) { // Both nodes being compared are Text nodes + isCompatible = true; + curFromNodeChild.nodeValue = curToNodeChild.nodeValue; + } + + if (isCompatible) { + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + continue outer; + } + } + + // No compatible match so remove the old node from the DOM + removeNode(curFromNodeChild, fromNode, alreadyVisited); + + curFromNodeChild = fromNextSibling; + } + + if (curToNodeId) { + if ((savedEl = savedEls[curToNodeId])) { + morphEl(savedEl, curToNodeChild, true); + curToNodeChild = savedEl; // We want to append the saved element instead + } else { + // The current DOM element in the target tree has an ID + // but we did not find a match in any of the corresponding + // siblings. We just put the target element in the old DOM tree + // but if we later find an element in the old DOM tree that has + // a matching ID then we will replace the target element + // with the corresponding old element and morph the old element + unmatchedEls[curToNodeId] = curToNodeChild; + } + } + + // If we got this far then we did not find a candidate match for our "to node" + // and we exhausted all of the children "from" nodes. 
Therefore, we will just + // append the current "to node" to the end + fromNode.appendChild(curToNodeChild); + + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + } + + // We have processed all of the "to nodes". If curFromNodeChild is non-null then + // we still have some from nodes left over that need to be removed + while(curFromNodeChild) { + fromNextSibling = curFromNodeChild.nextSibling; + removeNode(curFromNodeChild, fromNode, alreadyVisited); + curFromNodeChild = fromNextSibling; + } + + var specialElHandler = specialElHandlers[fromNode.tagName]; + if (specialElHandler) { + specialElHandler(fromNode, toNode); + } + } + + var morphedNode = fromNode; + var morphedNodeType = morphedNode.nodeType; + var toNodeType = toNode.nodeType; + + // Handle the case where we are given two DOM nodes that are not + // compatible (e.g.
<div> --> <span> or <div>
    --> TEXT) + if (morphedNodeType === 1) { + if (toNodeType === 1) { + if (morphedNode.tagName !== toNode.tagName) { + onNodeDiscarded(fromNode); + morphedNode = moveChildren(morphedNode, document.createElement(toNode.tagName)); + } + } else { + // Going from an element node to a text node + return toNode; + } + } else if (morphedNodeType === 3) { // Text node + if (toNodeType === 3) { + morphedNode.nodeValue = toNode.nodeValue; + return morphedNode; + } else { + onNodeDiscarded(fromNode); + // Text node to something else + return toNode; + } + } + + morphEl(morphedNode, toNode, false); + + // Fire the "onNodeDiscarded" event for any saved elements + // that never found a new home in the morphed DOM + for (var savedElId in savedEls) { + if (savedEls.hasOwnProperty(savedElId)) { + var savedEl = savedEls[savedElId]; + onNodeDiscarded(savedEl); + walkDiscardedChildNodes(savedEl); + } + } + + if (morphedNode !== fromNode && fromNode.parentNode) { + fromNode.parentNode.replaceChild(morphedNode, fromNode); + } + + return morphedNode; +} + +module.exports = morphdom; +},{}]},{},[1])(1) +}); \ No newline at end of file diff --git a/plugins/Sidebar/media/morphdom.js b/plugins/Sidebar/media/morphdom.js new file mode 100644 index 00000000..6829eef3 --- /dev/null +++ b/plugins/Sidebar/media/morphdom.js @@ -0,0 +1,340 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.morphdom = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o element + * since it sets the initial value. Changing the "value" + * attribute without changing the "value" property will have + * no effect since it is only used to the set the initial value. + * Similar for the "checked" attribute. + */ + /*INPUT: function(fromEl, toEl) { + fromEl.checked = toEl.checked; + fromEl.value = toEl.value; + + if (!toEl.hasAttribute('checked')) { + fromEl.removeAttribute('checked'); + } + + if (!toEl.hasAttribute('value')) { + fromEl.removeAttribute('value'); + } + }*/ +}; + +function noop() {} + +/** + * Loop over all of the attributes on the target node and make sure the + * original DOM node has the same attributes. If an attribute + * found on the original node is not on the new node then remove it from + * the original node + * @param {HTMLElement} fromNode + * @param {HTMLElement} toNode + */ +function morphAttrs(fromNode, toNode) { + var attrs = toNode.attributes; + var i; + var attr; + var attrName; + var attrValue; + var foundAttrs = {}; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + attrValue = attr.value; + foundAttrs[attrName] = true; + + if (fromNode.getAttribute(attrName) !== attrValue) { + fromNode.setAttribute(attrName, attrValue); + } + } + } + + // Delete any extra attributes found on the original DOM element that weren't + // found on the target element. 
+ attrs = fromNode.attributes; + + for (i=attrs.length-1; i>=0; i--) { + attr = attrs[i]; + if (attr.specified !== false) { + attrName = attr.name; + if (!foundAttrs.hasOwnProperty(attrName)) { + fromNode.removeAttribute(attrName); + } + } + } +} + +/** + * Copies the children of one DOM element to another DOM element + */ +function moveChildren(from, to) { + var curChild = from.firstChild; + while(curChild) { + var nextChild = curChild.nextSibling; + to.appendChild(curChild); + curChild = nextChild; + } + return to; +} + +function morphdom(fromNode, toNode, options) { + if (!options) { + options = {}; + } + + if (typeof toNode === 'string') { + var newBodyEl = document.createElement('body'); + newBodyEl.innerHTML = toNode; + toNode = newBodyEl.childNodes[0]; + } + + var savedEls = {}; // Used to save off DOM elements with IDs + var unmatchedEls = {}; + var onNodeDiscarded = options.onNodeDiscarded || noop; + var onBeforeMorphEl = options.onBeforeMorphEl || noop; + var onBeforeMorphElChildren = options.onBeforeMorphElChildren || noop; + + function removeNodeHelper(node, nestedInSavedEl) { + var id = node.id; + // If the node has an ID then save it off since we will want + // to reuse it in case the target DOM tree has a DOM element + // with the same ID + if (id) { + savedEls[id] = node; + } else if (!nestedInSavedEl) { + // If we are not nested in a saved element then we know that this node has been + // completely discarded and will not exist in the final DOM. + onNodeDiscarded(node); + } + + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + removeNodeHelper(curChild, nestedInSavedEl || id); + curChild = curChild.nextSibling; + } + } + } + + function walkDiscardedChildNodes(node) { + if (node.nodeType === 1) { + var curChild = node.firstChild; + while(curChild) { + + + if (!curChild.id) { + // We only want to handle nodes that don't have an ID to avoid double + // walking the same saved element. 
+ + onNodeDiscarded(curChild); + + // Walk recursively + walkDiscardedChildNodes(curChild); + } + + curChild = curChild.nextSibling; + } + } + } + + function removeNode(node, parentNode, alreadyVisited) { + parentNode.removeChild(node); + + if (alreadyVisited) { + if (!node.id) { + onNodeDiscarded(node); + walkDiscardedChildNodes(node); + } + } else { + removeNodeHelper(node); + } + } + + function morphEl(fromNode, toNode, alreadyVisited) { + if (toNode.id) { + // If an element with an ID is being morphed then it is will be in the final + // DOM so clear it out of the saved elements collection + delete savedEls[toNode.id]; + } + + if (onBeforeMorphEl(fromNode, toNode) === false) { + return; + } + + morphAttrs(fromNode, toNode); + + if (onBeforeMorphElChildren(fromNode, toNode) === false) { + return; + } + + var curToNodeChild = toNode.firstChild; + var curFromNodeChild = fromNode.firstChild; + var curToNodeId; + + var fromNextSibling; + var toNextSibling; + var savedEl; + var unmatchedEl; + + outer: while(curToNodeChild) { + toNextSibling = curToNodeChild.nextSibling; + curToNodeId = curToNodeChild.id; + + while(curFromNodeChild) { + var curFromNodeId = curFromNodeChild.id; + fromNextSibling = curFromNodeChild.nextSibling; + + if (!alreadyVisited) { + if (curFromNodeId && (unmatchedEl = unmatchedEls[curFromNodeId])) { + unmatchedEl.parentNode.replaceChild(curFromNodeChild, unmatchedEl); + morphEl(curFromNodeChild, unmatchedEl, alreadyVisited); + curFromNodeChild = fromNextSibling; + continue; + } + } + + var curFromNodeType = curFromNodeChild.nodeType; + + if (curFromNodeType === curToNodeChild.nodeType) { + var isCompatible = false; + + if (curFromNodeType === 1) { // Both nodes being compared are Element nodes + if (curFromNodeChild.tagName === curToNodeChild.tagName) { + // We have compatible DOM elements + if (curFromNodeId || curToNodeId) { + // If either DOM element has an ID then we handle + // those differently since we want to match up + // by ID + if (curToNodeId === curFromNodeId) { + isCompatible = true; + } + } else { + isCompatible = true; + } + } + + if (isCompatible) { + // We found compatible DOM elements so add a + // task to morph the compatible DOM elements + morphEl(curFromNodeChild, curToNodeChild, alreadyVisited); + } + } else if (curFromNodeType === 3) { // Both nodes being compared are Text nodes + isCompatible = true; + curFromNodeChild.nodeValue = curToNodeChild.nodeValue; + } + + if (isCompatible) { + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + continue outer; + } + } + + // No compatible match so remove the old node from the DOM + removeNode(curFromNodeChild, fromNode, alreadyVisited); + + curFromNodeChild = fromNextSibling; + } + + if (curToNodeId) { + if ((savedEl = savedEls[curToNodeId])) { + morphEl(savedEl, curToNodeChild, true); + curToNodeChild = savedEl; // We want to append the saved element instead + } else { + // The current DOM element in the target tree has an ID + // but we did not find a match in any of the corresponding + // siblings. We just put the target element in the old DOM tree + // but if we later find an element in the old DOM tree that has + // a matching ID then we will replace the target element + // with the corresponding old element and morph the old element + unmatchedEls[curToNodeId] = curToNodeChild; + } + } + + // If we got this far then we did not find a candidate match for our "to node" + // and we exhausted all of the children "from" nodes. 
Therefore, we will just + // append the current "to node" to the end + fromNode.appendChild(curToNodeChild); + + curToNodeChild = toNextSibling; + curFromNodeChild = fromNextSibling; + } + + // We have processed all of the "to nodes". If curFromNodeChild is non-null then + // we still have some from nodes left over that need to be removed + while(curFromNodeChild) { + fromNextSibling = curFromNodeChild.nextSibling; + removeNode(curFromNodeChild, fromNode, alreadyVisited); + curFromNodeChild = fromNextSibling; + } + + var specialElHandler = specialElHandlers[fromNode.tagName]; + if (specialElHandler) { + specialElHandler(fromNode, toNode); + } + } + + var morphedNode = fromNode; + var morphedNodeType = morphedNode.nodeType; + var toNodeType = toNode.nodeType; + + // Handle the case where we are given two DOM nodes that are not + // compatible (e.g.
<div> --> <span> or <div>
--> TEXT) + if (morphedNodeType === 1) { + if (toNodeType === 1) { + if (morphedNode.tagName !== toNode.tagName) { + onNodeDiscarded(fromNode); + morphedNode = moveChildren(morphedNode, document.createElement(toNode.tagName)); + } + } else { + // Going from an element node to a text node + return toNode; + } + } else if (morphedNodeType === 3) { // Text node + if (toNodeType === 3) { + morphedNode.nodeValue = toNode.nodeValue; + return morphedNode; + } else { + onNodeDiscarded(fromNode); + // Text node to something else + return toNode; + } + } + + morphEl(morphedNode, toNode, false); + + // Fire the "onNodeDiscarded" event for any saved elements + // that never found a new home in the morphed DOM + for (var savedElId in savedEls) { + if (savedEls.hasOwnProperty(savedElId)) { + var savedEl = savedEls[savedElId]; + onNodeDiscarded(savedEl); + walkDiscardedChildNodes(savedEl); + } + } + + if (morphedNode !== fromNode && fromNode.parentNode) { + fromNode.parentNode.replaceChild(morphedNode, fromNode); + } + + return morphedNode; +} + +module.exports = morphdom; +},{}]},{},[1])(1) +}); \ No newline at end of file diff --git a/plugins/Stats/StatsPlugin.py b/plugins/Stats/StatsPlugin.py new file mode 100644 index 00000000..2b2b7353 --- /dev/null +++ b/plugins/Stats/StatsPlugin.py @@ -0,0 +1,756 @@ +import time +import cgi +import os + +from Plugin import PluginManager +from Config import config + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + + def formatTableRow(self, row, class_name=""): + back = [] + for format, val in row: + if val is None: + formatted = "n/a" + elif format == "since": + if val: + formatted = "%.0f" % (time.time() - val) + else: + formatted = "n/a" + else: + formatted = format % val + back.append("<td>%s</td>" % formatted) + return "<tr class='%s'>%s</tr>" % (class_name, "".join(back)) + + def getObjSize(self, obj, hpy=None): + if hpy: + return float(hpy.iso(obj).domisize) / 1024 + else: + return 0 + + # /Stats entry point + def actionStats(self): + import gc + import sys + from Ui import UiRequest + from Db import Db + from Crypt import CryptConnection + + hpy = None + if self.get.get("size") == "1": # Calc obj size + try: + import guppy + hpy = guppy.hpy() + except: + pass + self.sendHeader() + + if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local: + yield "This function is disabled on this proxy" + raise StopIteration + + s = time.time() + main = sys.modules["main"] + + # Style + yield """ + + """ + + # Memory + try: + yield "rev%s | " % config.rev + yield "%s | " % config.ip_external + yield "Opened: %s | " % main.file_server.port_opened + yield "Crypt: %s | " % CryptConnection.manager.crypt_supported + yield "In: %.2fMB, Out: %.2fMB | " % ( + float(main.file_server.bytes_recv) / 1024 / 1024, + float(main.file_server.bytes_sent) / 1024 / 1024 + ) + yield "Peerid: %s | " % main.file_server.peer_id + import psutil + process = psutil.Process(os.getpid()) + mem = process.get_memory_info()[0] / float(2 ** 20) + yield "Mem: %.2fMB | " % mem + yield "Threads: %s | " % len(process.threads()) + yield "CPU: usr %.2fs sys %.2fs | " % process.cpu_times() + yield "Files: %s | " % len(process.open_files()) + yield "Sockets: %s | " % len(process.connections()) + yield "Calc size on off" + except Exception: + pass + yield "
    " + + # Connections + yield "Connections (%s, total made: %s):
    " % ( + len(main.file_server.connections), main.file_server.last_connection_id + ) + yield "" + yield "" + yield "" + for connection in main.file_server.connections: + if "cipher" in dir(connection.sock): + cipher = connection.sock.cipher()[0] + else: + cipher = connection.crypt + yield self.formatTableRow([ + ("%3d", connection.id), + ("%s", connection.type), + ("%s:%s", (connection.ip, connection.port)), + ("%s", connection.handshake.get("port_opened")), + ("%s", (connection.crypt, cipher)), + ("%6.3f", connection.last_ping_delay), + ("%s", connection.incomplete_buff_recv), + ("%s", connection.bad_actions), + ("since", max(connection.last_send_time, connection.last_recv_time)), + ("since", connection.start_time), + ("%.3f", connection.last_sent_time - connection.last_send_time), + ("%.3f", connection.cpu_time), + ("%.0fkB", connection.bytes_sent / 1024), + ("%.0fkB", connection.bytes_recv / 1024), + ("%s", connection.last_cmd), + ("%s", connection.waiting_requests.keys()), + ("%s r%s", (connection.handshake.get("version"), connection.handshake.get("rev", "?"))), + ("%s", connection.sites) + ]) + yield "
    id type ip open crypt pingbuff bad idle open delay cpu out in last sentwait version sites
    " + + # Tor hidden services + yield "

    Tor hidden services (status: %s):
    " % main.file_server.tor_manager.status + for site_address, onion in main.file_server.tor_manager.site_onions.items(): + yield "- %-34s: %s
    " % (site_address, onion) + + # Db + yield "

    Db:
    " + for db in sys.modules["Db.Db"].opened_dbs: + yield "- %.3fs: %s
    " % (time.time() - db.last_query_time, db.db_path.encode("utf8")) + + # Sites + yield "

    Sites:" + yield "" + yield "" + for site in sorted(self.server.sites.values(), lambda a, b: cmp(a.address,b.address)): + yield self.formatTableRow([ + ( + """%s""", + (site.address, site.address) + ), + ("%s", [peer.connection.id for peer in site.peers.values() if peer.connection and peer.connection.connected]), + ("%s/%s/%s", ( + len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]), + len(site.getConnectablePeers(100)), + len(site.peers) + )), + ("%s (loaded: %s)", ( + len(site.content_manager.contents), + len([key for key, val in dict(site.content_manager.contents).iteritems() if val]) + )), + ("%.0fkB", site.settings.get("bytes_sent", 0) / 1024), + ("%.0fkB", site.settings.get("bytes_recv", 0) / 1024), + ], "serving-%s" % site.settings["serving"]) + yield "" + yield "
    address connected peers content.json out in
    " + + # No more if not in debug mode + if not config.debug: + raise StopIteration + + # Object types + + obj_count = {} + for obj in gc.get_objects(): + obj_type = str(type(obj)) + if obj_type not in obj_count: + obj_count[obj_type] = [0, 0] + obj_count[obj_type][0] += 1 # Count + obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024 # Size + + yield "

    Objects in memory (types: %s, total: %s, %.2fkb):
    " % ( + len(obj_count), + sum([stat[0] for stat in obj_count.values()]), + sum([stat[1] for stat in obj_count.values()]) + ) + + for obj, stat in sorted(obj_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count + yield " - %.1fkb = %s x %s
    " % (stat[1], stat[0], obj, cgi.escape(obj)) + + # Classes + + class_count = {} + for obj in gc.get_objects(): + obj_type = str(type(obj)) + if obj_type != "": + continue + class_name = obj.__class__.__name__ + if class_name not in class_count: + class_count[class_name] = [0, 0] + class_count[class_name][0] += 1 # Count + class_count[class_name][1] += float(sys.getsizeof(obj)) / 1024 # Size + + yield "

    Classes in memory (types: %s, total: %s, %.2fkb):
    " % ( + len(class_count), + sum([stat[0] for stat in class_count.values()]), + sum([stat[1] for stat in class_count.values()]) + ) + + for obj, stat in sorted(class_count.items(), key=lambda x: x[1][0], reverse=True): # Sorted by count + yield " - %.1fkb = %s x %s
    " % (stat[1], stat[0], obj, cgi.escape(obj)) + + from greenlet import greenlet + objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)] + yield "
    Greenlets (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj).encode("utf8"))) + + from Worker import Worker + objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)] + yield "
    Workers (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from Connection import Connection + objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)] + yield "
    Connections (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from socket import socket + objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)] + yield "
    Sockets (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from msgpack import Unpacker + objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)] + yield "
    Msgpack unpacker (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from Site import Site + objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)] + yield "
    Sites (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + objs = [obj for obj in gc.get_objects() if isinstance(obj, self.server.log.__class__)] + yield "
    Loggers (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj.name))) + + objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)] + yield "
    UiRequests (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + from Peer import Peer + objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)] + yield "
    Peers (%s):
    " % len(objs) + for obj in objs: + yield " - %.1fkb: %s
    " % (self.getObjSize(obj, hpy), cgi.escape(repr(obj))) + + objs = [(key, val) for key, val in sys.modules.iteritems() if val is not None] + objs.sort() + yield "
    Modules (%s):
    " % len(objs) + for module_name, module in objs: + yield " - %.3fkb: %s %s
    " % (self.getObjSize(module, hpy), module_name, cgi.escape(repr(module))) + + gc.collect() # Implicit grabage collection + yield "Done in %.1f" % (time.time() - s) + + def actionDumpobj(self): + + import gc + import sys + + self.sendHeader() + + if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local: + yield "This function is disabled on this proxy" + raise StopIteration + + # No more if not in debug mode + if not config.debug: + yield "Not in debug mode" + raise StopIteration + + class_filter = self.get.get("class") + + yield """ + + """ + + objs = gc.get_objects() + for obj in objs: + obj_type = str(type(obj)) + if obj_type != "" or obj.__class__.__name__ != class_filter: + continue + yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj))) + for attr in dir(obj): + yield "- %s: %s
    " % (attr, cgi.escape(str(getattr(obj, attr)))) + yield "
    " + + gc.collect() # Implicit grabage collection + + def actionListobj(self): + + import gc + import sys + + self.sendHeader() + + if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local: + yield "This function is disabled on this proxy" + raise StopIteration + + # No more if not in debug mode + if not config.debug: + yield "Not in debug mode" + raise StopIteration + + type_filter = self.get.get("type") + + yield """ + + """ + + yield "Listing all %s objects in memory...
    " % cgi.escape(type_filter) + + ref_count = {} + objs = gc.get_objects() + for obj in objs: + obj_type = str(type(obj)) + if obj_type != type_filter: + continue + refs = [ + ref for ref in gc.get_referrers(obj) + if hasattr(ref, "__class__") and + ref.__class__.__name__ not in ["list", "dict", "function", "type", "frame", "WeakSet", "tuple"] + ] + if not refs: + continue + try: + yield "%.1fkb %s... " % ( + float(sys.getsizeof(obj)) / 1024, cgi.escape(str(obj)), cgi.escape(str(obj)[0:100].ljust(100)) + ) + except: + continue + for ref in refs: + yield " [" + if "object at" in str(ref) or len(str(ref)) > 100: + yield str(ref.__class__.__name__) + else: + yield str(ref.__class__.__name__) + ":" + cgi.escape(str(ref)) + yield "] " + ref_type = ref.__class__.__name__ + if ref_type not in ref_count: + ref_count[ref_type] = [0, 0] + ref_count[ref_type][0] += 1 # Count + ref_count[ref_type][1] += float(sys.getsizeof(obj)) / 1024 # Size + yield "
    " + + yield "
    Object referrer (total: %s, %.2fkb):
    " % (len(ref_count), sum([stat[1] for stat in ref_count.values()])) + + for obj, stat in sorted(ref_count.items(), key=lambda x: x[1][0], reverse=True)[0:30]: # Sorted by count + yield " - %.1fkb = %s x %s
    " % (stat[1], stat[0], cgi.escape(str(obj))) + + gc.collect() # Implicit grabage collection + + def actionBenchmark(self): + import sys + import gc + from contextlib import contextmanager + + output = self.sendHeader() + + if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local: + yield "This function is disabled on this proxy" + raise StopIteration + + @contextmanager + def benchmark(name, standard): + s = time.time() + output("- %s" % name) + try: + yield 1 + except Exception, err: + output("
    ! Error: %s
    " % err) + taken = time.time() - s + if taken > 0: + multipler = standard / taken + else: + multipler = 99 + if multipler < 0.3: + speed = "Sloooow" + elif multipler < 0.5: + speed = "Ehh" + elif multipler < 0.8: + speed = "Goodish" + elif multipler < 1.2: + speed = "OK" + elif multipler < 1.7: + speed = "Fine" + elif multipler < 2.5: + speed = "Fast" + elif multipler < 3.5: + speed = "WOW" + else: + speed = "Insane!!" + output("%.3fs [x%.2f: %s]
    " % (taken, multipler, speed)) + time.sleep(0.01) + + yield """ + + """ + + yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...
    " % (config.version, config.rev, sys.version, sys.platform) + + t = time.time() + + # CryptBitcoin + yield "
    CryptBitcoin:
    " + from Crypt import CryptBitcoin + + # seed = CryptBitcoin.newSeed() + # yield "- Seed: %s
    " % seed + seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38" + + with benchmark("hdPrivatekey x 10", 0.7): + for i in range(10): + privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10) + yield "." + valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk" + assert privatekey == valid, "%s != %s" % (privatekey, valid) + + data = "Hello" * 1024 # 5k + with benchmark("sign x 10", 0.35): + for i in range(10): + yield "." + sign = CryptBitcoin.sign(data, privatekey) + valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w=" + assert sign == valid, "%s != %s" % (sign, valid) + + address = CryptBitcoin.privatekeyToAddress(privatekey) + if CryptBitcoin.opensslVerify: # Openssl avalible + with benchmark("openssl verify x 100", 0.37): + for i in range(100): + if i % 10 == 0: + yield "." + ok = CryptBitcoin.verify(data, address, sign) + assert ok, "does not verify from %s" % address + else: + yield " - openssl verify x 100...not avalible :(
    " + + openssl_verify_bk = CryptBitcoin.opensslVerify # Emulate openssl not found in any way + CryptBitcoin.opensslVerify = None + with benchmark("pure-python verify x 10", 1.6): + for i in range(10): + yield "." + ok = CryptBitcoin.verify(data, address, sign) + assert ok, "does not verify from %s" % address + CryptBitcoin.opensslVerify = openssl_verify_bk + + # CryptHash + yield "
    CryptHash:
    " + from Crypt import CryptHash + from cStringIO import StringIO + + data = StringIO("Hello" * 1024 * 1024) # 5m + with benchmark("sha256 5M x 10", 0.6): + for i in range(10): + data.seek(0) + hash = CryptHash.sha256sum(data) + yield "." + valid = "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa" + assert hash == valid, "%s != %s" % (hash, valid) + + data = StringIO("Hello" * 1024 * 1024) # 5m + with benchmark("sha512 5M x 10", 0.6): + for i in range(10): + data.seek(0) + hash = CryptHash.sha512sum(data) + yield "." + valid = "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d" + assert hash == valid, "%s != %s" % (hash, valid) + + with benchmark("os.urandom(256) x 1000", 0.0065): + for i in range(10): + for y in range(100): + data = os.urandom(256) + yield "." + + # Msgpack + import msgpack + yield "
    Msgpack: (version: %s)
    " % ".".join(map(str, msgpack.version)) + binary = 'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv' + data = {"int": 1024*1024*1024, "float": 12345.67890, "text": "hello"*1024, "binary": binary} + with benchmark("pack 5K x 10 000", 0.78): + for i in range(10): + for y in range(1000): + data_packed = msgpack.packb(data) + yield "." + valid = """\x84\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohe
llohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa6binary\xda\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv""" + assert data_packed 
== valid, "%s
    !=
    %s" % (repr(data_packed), repr(valid)) + + with benchmark("unpack 5K x 10 000", 1.2): + for i in range(10): + for y in range(1000): + data_unpacked = msgpack.unpackb(data_packed) + yield "." + assert data == data_unpacked, "%s != %s" % (data_unpack, data) + + with benchmark("streaming unpack 5K x 10 000", 1.4): + for i in range(10): + unpacker = msgpack.Unpacker() + for y in range(1000): + unpacker.feed(data_packed) + for data_unpacked in unpacker: + pass + yield "." + assert data == data_unpacked, "%s != %s" % (data_unpack, data) + + # Db + from Db import Db + import sqlite3 + yield "
    Db: (version: %s, API: %s)
    " % (sqlite3.sqlite_version, sqlite3.version) + + schema = { + "db_name": "TestDb", + "db_file": "%s/benchmark.db" % config.data_dir, + "maps": { + ".*": { + "to_table": { + "test": "test" + } + } + }, + "tables": { + "test": { + "cols": [ + ["test_id", "INTEGER"], + ["title", "TEXT"], + ["json_id", "INTEGER REFERENCES json (json_id)"] + ], + "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"], + "schema_changed": 1426195822 + } + } + } + + if os.path.isfile("%s/benchmark.db" % config.data_dir): + os.unlink("%s/benchmark.db" % config.data_dir) + + with benchmark("Open x 10", 0.13): + for i in range(10): + db = Db(schema, "%s/benchmark.db" % config.data_dir) + db.checkTables() + db.close() + yield "." + + db = Db(schema, "%s/benchmark.db" % config.data_dir) + db.checkTables() + import json + + with benchmark("Insert x 10 x 1000", 1.0): + for u in range(10): # 10 user + data = {"test": []} + for i in range(1000): # 1000 line of data + data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)}) + json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w")) + db.updateJson("%s/test_%s.json" % (config.data_dir, u)) + os.unlink("%s/test_%s.json" % (config.data_dir, u)) + yield "." + + with benchmark("Buffered insert x 100 x 100", 1.3): + cur = db.getCursor() + cur.execute("BEGIN") + cur.logging = False + for u in range(100, 200): # 100 user + data = {"test": []} + for i in range(100): # 1000 line of data + data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)}) + json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w")) + db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur) + os.unlink("%s/test_%s.json" % (config.data_dir, u)) + if u % 10 == 0: + yield "." + cur.execute("COMMIT") + + yield " - Total rows in db: %s
    " % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0] + + with benchmark("Indexed query x 1000", 0.25): + found = 0 + cur = db.getCursor() + cur.logging = False + for i in range(1000): # 1000x by test_id + res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i) + for row in res: + found += 1 + if i % 100 == 0: + yield "." + + assert found == 20000, "Found: %s != 20000" % found + + with benchmark("Not indexed query x 100", 0.6): + found = 0 + cur = db.getCursor() + cur.logging = False + for i in range(100): # 1000x by test_id + res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i) + for row in res: + found += 1 + if i % 10 == 0: + yield "." + + assert found == 18900, "Found: %s != 18900" % found + + with benchmark("Like query x 100", 1.8): + found = 0 + cur = db.getCursor() + cur.logging = False + for i in range(100): # 1000x by test_id + res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i) + for row in res: + found += 1 + if i % 10 == 0: + yield "." + + assert found == 38900, "Found: %s != 11000" % found + + db.close() + if os.path.isfile("%s/benchmark.db" % config.data_dir): + os.unlink("%s/benchmark.db" % config.data_dir) + + gc.collect() # Implicit grabage collection + + # Zip + yield "
    Compression:
    " + import zipfile + test_data = "Test" * 1024 + file_name = "\xc3\x81rv\xc3\xadzt\xc5\xb0r\xc5\x91t\xc3\xbck\xc3\xb6r\xc3\xb3g\xc3\xa9p\xe4\xb8\xad\xe5\x8d\x8e%s.txt" + + with benchmark("Zip pack x 10", 0.12): + for i in range(10): + with zipfile.ZipFile('%s/test.zip' % config.data_dir, 'w') as archive: + for y in range(100): + zip_info = zipfile.ZipInfo(file_name % y, (1980,1,1,0,0,0)) + zip_info.compress_type = zipfile.ZIP_DEFLATED + zip_info.create_system = 3 + archive.writestr(zip_info, test_data) + yield "." + + hash = CryptHash.sha512sum(open("%s/test.zip" % config.data_dir, "rb")) + valid = "f6ef623e6653883a1758db14aa593350e26c9dc53a8406d6e6defd6029dbd483" + assert hash == valid, "Invalid hash: %s != %s
    " % (hash, valid) + + with benchmark("Zip unpack x 10", 0.2): + for i in range(10): + with zipfile.ZipFile('%s/test.zip' % config.data_dir) as archive: + for y in range(100): + assert archive.read(file_name % y) == test_data + yield "." + + if os.path.isfile("%s/test.zip" % config.data_dir): + os.unlink("%s/test.zip" % config.data_dir) + + # Tar.gz + import tarfile + import struct + + # Monkey patch _init_write_gz to use fixed date in order to keep the hash independent from datetime + def nodate_write_gzip_header(self): + self.mtime = 0 + original_write_gzip_header(self) + + import gzip + original_write_gzip_header = gzip.GzipFile._write_gzip_header + gzip.GzipFile._write_gzip_header = nodate_write_gzip_header + + test_data_io = StringIO("Test" * 1024) + with benchmark("Tar.gz pack x 10", 0.3): + for i in range(10): + with tarfile.open('%s/test.tar.gz' % config.data_dir, 'w:gz') as archive: + for y in range(100): + test_data_io.seek(0) + tar_info = tarfile.TarInfo(file_name % y) + tar_info.size = 4 * 1024 + archive.addfile(tar_info, test_data_io) + yield "." + + hash = CryptHash.sha512sum(open("%s/test.tar.gz" % config.data_dir, "rb")) + valid = "4704ebd8c987ed6f833059f1de9c475d443b0539b8d4c4cb8b49b26f7bbf2d19" + assert hash == valid, "Invalid hash: %s != %s
    " % (hash, valid) + + with benchmark("Tar.gz unpack x 10", 0.2): + for i in range(10): + with tarfile.open('%s/test.tar.gz' % config.data_dir, 'r:gz') as archive: + for y in range(100): + assert archive.extractfile(file_name % y).read() == test_data + yield "." + + if os.path.isfile("%s/test.tar.gz" % config.data_dir): + os.unlink("%s/test.tar.gz" % config.data_dir) + + # Tar.bz2 + import tarfile + test_data_io = StringIO("Test" * 1024) + with benchmark("Tar.bz2 pack x 10", 2.0): + for i in range(10): + with tarfile.open('%s/test.tar.bz2' % config.data_dir, 'w:bz2') as archive: + for y in range(100): + test_data_io.seek(0) + tar_info = tarfile.TarInfo(file_name % y) + tar_info.size = 4 * 1024 + archive.addfile(tar_info, test_data_io) + yield "." + + hash = CryptHash.sha512sum(open("%s/test.tar.bz2" % config.data_dir, "rb")) + valid = "90cba0b4d9abaa37b830bf37e4adba93bfd183e095b489ebee62aaa94339f3b5" + assert hash == valid, "Invalid hash: %s != %s
    " % (hash, valid) + + with benchmark("Tar.bz2 unpack x 10", 0.5): + for i in range(10): + with tarfile.open('%s/test.tar.bz2' % config.data_dir, 'r:bz2') as archive: + for y in range(100): + assert archive.extractfile(file_name % y).read() == test_data + yield "." + + if os.path.isfile("%s/test.tar.bz2" % config.data_dir): + os.unlink("%s/test.tar.bz2" % config.data_dir) + + + yield "
    Done. Total: %.2fs" % (time.time() - t) + + def actionGcCollect(self): + import gc + self.sendHeader() + yield str(gc.collect()) diff --git a/plugins/Stats/__init__.py b/plugins/Stats/__init__.py new file mode 100644 index 00000000..90bd9d6e --- /dev/null +++ b/plugins/Stats/__init__.py @@ -0,0 +1 @@ +import StatsPlugin \ No newline at end of file diff --git a/plugins/TranslateSite/TranslateSitePlugin.py b/plugins/TranslateSite/TranslateSitePlugin.py new file mode 100644 index 00000000..f0112c11 --- /dev/null +++ b/plugins/TranslateSite/TranslateSitePlugin.py @@ -0,0 +1,67 @@ +import time + +from Plugin import PluginManager +from Translate import translate + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def actionSiteMedia(self, path, header_length=True): + file_name = path.split("/")[-1] + if not file_name: # Path ends with / + file_name = "index.html" + extension = file_name.split(".")[-1] + if translate.lang != "en" and extension in ["js", "html"]: + path_parts = self.parsePath(path) + file_generator = super(UiRequestPlugin, self).actionSiteMedia(path, header_length=False) + if "next" in dir(file_generator): # File found and generator returned + site = self.server.sites.get(path_parts["address"]) + return self.actionPatchFile(site, path_parts["inner_path"], file_generator) + else: + return file_generator + + else: + return super(UiRequestPlugin, self).actionSiteMedia(path) + + def actionUiMedia(self, path): + file_generator = super(UiRequestPlugin, self).actionUiMedia(path) + if translate.lang != "en" and path.endswith(".js"): + s = time.time() + data = "".join(list(file_generator)) + data = translate.translateData(data) + self.log.debug("Patched %s (%s bytes) in %.3fs" % (path, len(data), time.time() - s)) + return iter([data]) + else: + return file_generator + + def actionPatchFile(self, site, inner_path, file_generator): + content_json = site.content_manager.contents["content.json"] + lang_file = "languages/%s.json" % translate.lang + lang_file_exist = False + if site.settings.get("own"): # My site, check if the file is exist (allow to add new lang without signing) + if site.storage.isFile(lang_file): + lang_file_exist = True + else: # Not my site the reference in content.json is enough (will wait for download later) + if lang_file in content_json.get("files", {}): + lang_file_exist = True + + if not lang_file_exist or inner_path not in content_json.get("translate", []): + for part in file_generator: + if inner_path.endswith(".html"): + yield part.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache + else: + yield part + else: + s = time.time() + data = "".join(list(file_generator)) + + # if site.content_manager.contents["content.json"]["files"].get(lang_file): + site.needFile(lang_file, priority=10) + if inner_path.endswith("js"): + data = translate.translateData(data, site.storage.loadJson(lang_file), "js") + else: + data = translate.translateData(data, site.storage.loadJson(lang_file), "html") + data = data.replace("lang={lang}", "lang=" + str(translate.lang)) # lang get parameter to .js file to avoid cache + + self.log.debug("Patched %s (%s bytes) in %.3fs" % (inner_path, len(data), time.time() - s)) + yield data diff --git a/plugins/TranslateSite/__init__.py b/plugins/TranslateSite/__init__.py new file mode 100644 index 00000000..0b50ddc8 --- /dev/null +++ b/plugins/TranslateSite/__init__.py @@ -0,0 +1 @@ +import TranslateSitePlugin diff --git a/plugins/Trayicon/TrayiconPlugin.py 
b/plugins/Trayicon/TrayiconPlugin.py new file mode 100644 index 00000000..dc5da3f6 --- /dev/null +++ b/plugins/Trayicon/TrayiconPlugin.py @@ -0,0 +1,163 @@ +import os +import sys +import atexit + +from Plugin import PluginManager +from Config import config +from Translate import Translate + +allow_reload = False # No source reload supported in this plugin + +if "_" not in locals(): + _ = Translate("plugins/Trayicon/languages/") + + +@PluginManager.registerTo("Actions") +class ActionsPlugin(object): + + def main(self): + global notificationicon, winfolders + from lib import notificationicon, winfolders + import gevent.threadpool + + self.main = sys.modules["main"] + + fs_encoding = sys.getfilesystemencoding() + + icon = notificationicon.NotificationIcon( + os.path.join(os.path.dirname(os.path.abspath(__file__).decode(fs_encoding)), 'trayicon.ico'), + "ZeroNet %s" % config.version + ) + self.icon = icon + + if not config.debug: # Hide console if not in debug mode + notificationicon.hideConsole() + self.console = False + else: + self.console = True + + @atexit.register + def hideIcon(): + icon.die() + + ui_ip = config.ui_ip if config.ui_ip != "*" else "127.0.0.1" + + icon.items = [ + (self.titleIp, False), + (self.titleConnections, False), + (self.titleTransfer, False), + (self.titleConsole, self.toggleConsole), + (self.titleAutorun, self.toggleAutorun), + "--", + (_["ZeroNet Twitter"], lambda: self.opensite("https://twitter.com/HelloZeroNet")), + (_["ZeroNet Reddit"], lambda: self.opensite("http://www.reddit.com/r/zeronet/")), + (_["ZeroNet Github"], lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet")), + (_["Report bug/request feature"], lambda: self.opensite("https://github.com/HelloZeroNet/ZeroNet/issues")), + "--", + (_["!Open ZeroNet"], lambda: self.opensite("http://%s:%s/%s" % (ui_ip, config.ui_port, config.homepage))), + "--", + (_["Quit"], self.quit), + ] + + if not notificationicon.hasConsole(): + del icon.items[3] + + icon.clicked = lambda: self.opensite("http://%s:%s/%s" % (ui_ip, config.ui_port, config.homepage)) + self.quit_servers_event = gevent.threadpool.ThreadResult( + lambda res: gevent.spawn_later(0.1, self.quitServers) + ) # Fix gevent thread switch error + gevent.threadpool.start_new_thread(icon._run, ()) # Start in real thread (not gevent compatible) + super(ActionsPlugin, self).main() + icon._die = True + + def quit(self): + self.icon.die() + self.quit_servers_event.set(True) + + def quitServers(self): + self.main.ui_server.stop() + self.main.file_server.stop() + + def opensite(self, url): + import webbrowser + webbrowser.open(url, new=0) + + def titleIp(self): + title = "!IP: %s " % config.ip_external + if self.main.file_server.port_opened: + title += _["(active)"] + else: + title += _["(passive)"] + return title + + def titleConnections(self): + title = _["Connections: %s"] % len(self.main.file_server.connections) + return title + + def titleTransfer(self): + title = _["Received: %.2f MB | Sent: %.2f MB"] % ( + float(self.main.file_server.bytes_recv) / 1024 / 1024, + float(self.main.file_server.bytes_sent) / 1024 / 1024 + ) + return title + + def titleConsole(self): + translate = _["Show console window"] + if self.console: + return "+" + translate + else: + return translate + + def toggleConsole(self): + if self.console: + notificationicon.hideConsole() + self.console = False + else: + notificationicon.showConsole() + self.console = True + + def getAutorunPath(self): + return "%s\\zeronet.cmd" % winfolders.get(winfolders.STARTUP) + + def 
    formatAutorun(self): + args = sys.argv[:] + + if not getattr(sys, 'frozen', False): # Not frozen + args.insert(0, sys.executable) + cwd = os.getcwd().decode(sys.getfilesystemencoding()) + else: + cwd = os.path.dirname(sys.executable).decode(sys.getfilesystemencoding()) + + if sys.platform == 'win32': + args = ['"%s"' % arg for arg in args if arg] + cmd = " ".join(args) + + # Don't open the browser on autorun + cmd = cmd.replace("start.py", "zeronet.py").replace('"--open_browser"', "").replace('"default_browser"', "").strip() + cmd += ' --open_browser ""' + cmd = cmd.decode(sys.getfilesystemencoding()) + + return u""" + @echo off + chcp 65001 > nul + set PYTHONIOENCODING=utf-8 + cd /D \"%s\" + start "" %s + """ % (cwd, cmd) + + def isAutorunEnabled(self): + path = self.getAutorunPath() + return os.path.isfile(path) and open(path).read().decode("utf8") == self.formatAutorun() + + def titleAutorun(self): + translate = _["Start ZeroNet when Windows starts"] + if self.isAutorunEnabled(): + return "+" + translate + else: + return translate + + def toggleAutorun(self): + if self.isAutorunEnabled(): + os.unlink(self.getAutorunPath()) + else: + open(self.getAutorunPath(), "w").write(self.formatAutorun().encode("utf8")) diff --git a/plugins/Trayicon/__init__.py b/plugins/Trayicon/__init__.py new file mode 100644 index 00000000..5b584962 --- /dev/null +++ b/plugins/Trayicon/__init__.py @@ -0,0 +1,4 @@ +import sys + +if sys.platform == 'win32': + import TrayiconPlugin \ No newline at end of file diff --git a/plugins/Trayicon/languages/fr.json b/plugins/Trayicon/languages/fr.json new file mode 100644 index 00000000..ec335318 --- /dev/null +++ b/plugins/Trayicon/languages/fr.json @@ -0,0 +1,14 @@ +{ + "ZeroNet Twitter": "ZeroNet Twitter", + "ZeroNet Reddit": "ZeroNet Reddit", + "ZeroNet Github": "ZeroNet Github", + "Report bug/request feature": "Rapport d'erreur/Demander une fonctionnalité", + "!Open ZeroNet": "!Ouvrir ZeroNet", + "Quit": "Quitter", + "(active)": "(actif)", + "(passive)": "(passif)", + "Connections: %s": "Connexions: %s", + "Received: %.2f MB | Sent: %.2f MB": "Reçu: %.2f MB | Envoyé: %.2f MB", + "Show console window": "Afficher la console", + "Start ZeroNet when Windows starts": "Lancer ZeroNet au démarrage de Windows" +} diff --git a/plugins/Trayicon/languages/hu.json b/plugins/Trayicon/languages/hu.json new file mode 100644 index 00000000..56fef23a --- /dev/null +++ b/plugins/Trayicon/languages/hu.json @@ -0,0 +1,14 @@ +{ + "ZeroNet Twitter": "ZeroNet Twitter", + "ZeroNet Reddit": "ZeroNet Reddit", + "ZeroNet Github": "ZeroNet Github", + "Report bug/request feature": "Hiba bejelentés/ötletek", + "!Open ZeroNet": "!ZeroNet megnyitása", + "Quit": "Kilépés", + "(active)": "(aktív)", + "(passive)": "(passzív)", + "Connections: %s": "Kapcsolatok: %s", + "Received: %.2f MB | Sent: %.2f MB": "Fogadott: %.2f MB | Küldött: %.2f MB", + "Show console window": "Parancssor mutatása", + "Start ZeroNet when Windows starts": "ZeroNet indítása a Windows-sal együtt" +} diff --git a/plugins/Trayicon/languages/it.json b/plugins/Trayicon/languages/it.json new file mode 100644 index 00000000..efbd6cfa --- /dev/null +++ b/plugins/Trayicon/languages/it.json @@ -0,0 +1,14 @@ +{ + "ZeroNet Twitter": "ZeroNet Twitter", + "ZeroNet Reddit": "ZeroNet Reddit", + "ZeroNet Github": "ZeroNet Github", + "Report bug/request feature": "Segnala bug/richiesta di una funzione", + "!Open ZeroNet": "!Apri ZeroNet", + "Quit": "Chiudi", + "(active)": "(attivo)", + "(passive)": "(passivo)", + "Connections: %s": "Connessioni: %s", +
"Received: %.2f MB | Sent: %.2f MB": "Ricevuto: %.2f MB | Inviato: %.2f MB", + "Show console window": "Mostra finestra console", + "Start ZeroNet when Windows starts": "Avvia ZeroNet all'avvio di Windows" +} diff --git a/plugins/Trayicon/languages/pt-br.json b/plugins/Trayicon/languages/pt-br.json new file mode 100644 index 00000000..5e86493a --- /dev/null +++ b/plugins/Trayicon/languages/pt-br.json @@ -0,0 +1,14 @@ +{ + "ZeroNet Twitter": "ZeroNet Twitter", + "ZeroNet Reddit": "ZeroNet Reddit", + "ZeroNet Github": "ZeroNet Github", + "Report bug/request feature": "Reportar bug/sugerir recurso", + "!Open ZeroNet": "!Abrir ZeroNet", + "Quit": "Sair", + "(active)": "(ativo)", + "(passive)": "(passivo)", + "Connections: %s": "Conexões: %s", + "Received: %.2f MB | Sent: %.2f MB": "Recebido: %.2f MB | Enviado: %.2f MB", + "Show console window": "Mostrar console", + "Start ZeroNet when Windows starts": "Iniciar o ZeroNet quando o Windows for iniciado" +} diff --git a/plugins/Trayicon/languages/tr.json b/plugins/Trayicon/languages/tr.json new file mode 100644 index 00000000..077b8ddd --- /dev/null +++ b/plugins/Trayicon/languages/tr.json @@ -0,0 +1,14 @@ +{ + "ZeroNet Twitter": "ZeroNet Twitter", + "ZeroNet Reddit": "ZeroNet Reddit", + "ZeroNet Github": "ZeroNet Github", + "Report bug/request feature": "Hata bildir/geliştirme taleb et", + "!Open ZeroNet": "!ZeroNet'i Aç", + "Quit": "Kapat", + "(active)": "(aktif)", + "(passive)": "(pasif)", + "Connections: %s": "Bağlantı sayısı: %s", + "Received: %.2f MB | Sent: %.2f MB": "Gelen: %.2f MB | Gönderilen: %.2f MB", + "Show console window": "Konsolu aç", + "Start ZeroNet when Windows starts": "ZeroNet'i açılışta otomatik başlat" +} diff --git a/plugins/Trayicon/languages/zh-tw.json b/plugins/Trayicon/languages/zh-tw.json new file mode 100644 index 00000000..2189033e --- /dev/null +++ b/plugins/Trayicon/languages/zh-tw.json @@ -0,0 +1,14 @@ +{ + "ZeroNet Twitter": "ZeroNet Twitter", + "ZeroNet Reddit": "ZeroNet Reddit", + "ZeroNet Github": "ZeroNet Github", + "Report bug/request feature": "回饋问题/請求功能", + "!Open ZeroNet": "!開啟 ZeroNet", + "Quit": "退出", + "(active)": "(主動模式)", + "(passive)": "(被動模式)", + "Connections: %s": "連線數: %s", + "Received: %.2f MB | Sent: %.2f MB": "已收到: %.2f MB | 已傳送: %.2f MB", + "Show console window": "顯示控制臺窗體", + "Start ZeroNet when Windows starts": "在 Windows 啟動時執行 ZeroNet" +} diff --git a/plugins/Trayicon/languages/zh.json b/plugins/Trayicon/languages/zh.json new file mode 100644 index 00000000..29b73305 --- /dev/null +++ b/plugins/Trayicon/languages/zh.json @@ -0,0 +1,14 @@ +{ + "ZeroNet Twitter": "ZeroNet Twitter", + "ZeroNet Reddit": "ZeroNet Reddit", + "ZeroNet Github": "ZeroNet Github", + "Report bug/request feature": "反馈问题/请求功能", + "!Open ZeroNet": "!打开 ZeroNet", + "Quit": "退出", + "(active)": "(主动模式)", + "(passive)": "(被动模式)", + "Connections: %s": "连接数: %s", + "Received: %.2f MB | Sent: %.2f MB": "已接收: %.2f MB | 已发送: %.2f MB", + "Show console window": "显示控制台窗口", + "Start ZeroNet when Windows starts": "在 Windows 启动时运行 ZeroNet" +} diff --git a/plugins/Trayicon/lib/__init__.py b/plugins/Trayicon/lib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/plugins/Trayicon/lib/notificationicon.py b/plugins/Trayicon/lib/notificationicon.py new file mode 100644 index 00000000..bc7e2175 --- /dev/null +++ b/plugins/Trayicon/lib/notificationicon.py @@ -0,0 +1,724 @@ +# Pure ctypes windows taskbar notification icon +# via https://gist.github.com/jasonbot/5759510 +# Modified for ZeroNet + +import ctypes +import 
ctypes.wintypes +import os +import uuid +import time +import gevent + +__all__ = ['NotificationIcon'] + +# Create popup menu + +CreatePopupMenu = ctypes.windll.user32.CreatePopupMenu +CreatePopupMenu.restype = ctypes.wintypes.HMENU +CreatePopupMenu.argtypes = [] + +MF_BYCOMMAND = 0x0 +MF_BYPOSITION = 0x400 + +MF_BITMAP = 0x4 +MF_CHECKED = 0x8 +MF_DISABLED = 0x2 +MF_ENABLED = 0x0 +MF_GRAYED = 0x1 +MF_MENUBARBREAK = 0x20 +MF_MENUBREAK = 0x40 +MF_OWNERDRAW = 0x100 +MF_POPUP = 0x10 +MF_SEPARATOR = 0x800 +MF_STRING = 0x0 +MF_UNCHECKED = 0x0 + +InsertMenu = ctypes.windll.user32.InsertMenuW +InsertMenu.restype = ctypes.wintypes.BOOL +InsertMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.LPCWSTR] + +AppendMenu = ctypes.windll.user32.AppendMenuW +AppendMenu.restype = ctypes.wintypes.BOOL +AppendMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT, ctypes.wintypes.LPCWSTR] + +SetMenuDefaultItem = ctypes.windll.user32.SetMenuDefaultItem +SetMenuDefaultItem.restype = ctypes.wintypes.BOOL +SetMenuDefaultItem.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.wintypes.UINT] + +class POINT(ctypes.Structure): + _fields_ = [ ('x', ctypes.wintypes.LONG), + ('y', ctypes.wintypes.LONG)] + +GetCursorPos = ctypes.windll.user32.GetCursorPos +GetCursorPos.argtypes = [ctypes.POINTER(POINT)] + +SetForegroundWindow = ctypes.windll.user32.SetForegroundWindow +SetForegroundWindow.argtypes = [ctypes.wintypes.HWND] + +TPM_LEFTALIGN = 0x0 +TPM_CENTERALIGN = 0x4 +TPM_RIGHTALIGN = 0x8 + +TPM_TOPALIGN = 0x0 +TPM_VCENTERALIGN = 0x10 +TPM_BOTTOMALIGN = 0x20 + +TPM_NONOTIFY = 0x80 +TPM_RETURNCMD = 0x100 + +TPM_LEFTBUTTON = 0x0 +TPM_RIGHTBUTTON = 0x2 + +TPM_HORNEGANIMATION = 0x800 +TPM_HORPOSANIMATION = 0x400 +TPM_NOANIMATION = 0x4000 +TPM_VERNEGANIMATION = 0x2000 +TPM_VERPOSANIMATION = 0x1000 + +TrackPopupMenu = ctypes.windll.user32.TrackPopupMenu +TrackPopupMenu.restype = ctypes.wintypes.BOOL +TrackPopupMenu.argtypes = [ctypes.wintypes.HMENU, ctypes.wintypes.UINT, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.wintypes.HWND, ctypes.c_void_p] + +PostMessage = ctypes.windll.user32.PostMessageW +PostMessage.restype = ctypes.wintypes.BOOL +PostMessage.argtypes = [ctypes.wintypes.HWND, ctypes.wintypes.UINT, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM] + +DestroyMenu = ctypes.windll.user32.DestroyMenu +DestroyMenu.restype = ctypes.wintypes.BOOL +DestroyMenu.argtypes = [ctypes.wintypes.HMENU] + +# Create notification icon + +GUID = ctypes.c_ubyte * 16 + +class TimeoutVersionUnion(ctypes.Union): + _fields_ = [('uTimeout', ctypes.wintypes.UINT), + ('uVersion', ctypes.wintypes.UINT),] + +NIS_HIDDEN = 0x1 +NIS_SHAREDICON = 0x2 + +class NOTIFYICONDATA(ctypes.Structure): + def __init__(self, *args, **kwargs): + super(NOTIFYICONDATA, self).__init__(*args, **kwargs) + self.cbSize = ctypes.sizeof(self) + _fields_ = [ + ('cbSize', ctypes.wintypes.DWORD), + ('hWnd', ctypes.wintypes.HWND), + ('uID', ctypes.wintypes.UINT), + ('uFlags', ctypes.wintypes.UINT), + ('uCallbackMessage', ctypes.wintypes.UINT), + ('hIcon', ctypes.wintypes.HICON), + ('szTip', ctypes.wintypes.WCHAR * 64), + ('dwState', ctypes.wintypes.DWORD), + ('dwStateMask', ctypes.wintypes.DWORD), + ('szInfo', ctypes.wintypes.WCHAR * 256), + ('union', TimeoutVersionUnion), + ('szInfoTitle', ctypes.wintypes.WCHAR * 64), + ('dwInfoFlags', ctypes.wintypes.DWORD), + ('guidItem', GUID), + ('hBalloonIcon', ctypes.wintypes.HICON), + ] + +NIM_ADD = 0 +NIM_MODIFY = 1 
+NIM_DELETE = 2 +NIM_SETFOCUS = 3 +NIM_SETVERSION = 4 + +NIF_MESSAGE = 1 +NIF_ICON = 2 +NIF_TIP = 4 +NIF_STATE = 8 +NIF_INFO = 16 +NIF_GUID = 32 +NIF_REALTIME = 64 +NIF_SHOWTIP = 128 + +NIIF_NONE = 0 +NIIF_INFO = 1 +NIIF_WARNING = 2 +NIIF_ERROR = 3 +NIIF_USER = 4 + +NOTIFYICON_VERSION = 3 +NOTIFYICON_VERSION_4 = 4 + +Shell_NotifyIcon = ctypes.windll.shell32.Shell_NotifyIconW +Shell_NotifyIcon.restype = ctypes.wintypes.BOOL +Shell_NotifyIcon.argtypes = [ctypes.wintypes.DWORD, ctypes.POINTER(NOTIFYICONDATA)] + +# Load icon/image + +IMAGE_BITMAP = 0 +IMAGE_ICON = 1 +IMAGE_CURSOR = 2 + +LR_CREATEDIBSECTION = 0x00002000 +LR_DEFAULTCOLOR = 0x00000000 +LR_DEFAULTSIZE = 0x00000040 +LR_LOADFROMFILE = 0x00000010 +LR_LOADMAP3DCOLORS = 0x00001000 +LR_LOADTRANSPARENT = 0x00000020 +LR_MONOCHROME = 0x00000001 +LR_SHARED = 0x00008000 +LR_VGACOLOR = 0x00000080 + +OIC_SAMPLE = 32512 +OIC_HAND = 32513 +OIC_QUES = 32514 +OIC_BANG = 32515 +OIC_NOTE = 32516 +OIC_WINLOGO = 32517 +OIC_WARNING = OIC_BANG +OIC_ERROR = OIC_HAND +OIC_INFORMATION = OIC_NOTE + +LoadImage = ctypes.windll.user32.LoadImageW +LoadImage.restype = ctypes.wintypes.HANDLE +LoadImage.argtypes = [ctypes.wintypes.HINSTANCE, ctypes.wintypes.LPCWSTR, ctypes.wintypes.UINT, ctypes.c_int, ctypes.c_int, ctypes.wintypes.UINT] + +# CreateWindow call + +WNDPROC = ctypes.WINFUNCTYPE(ctypes.c_int, ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM) +DefWindowProc = ctypes.windll.user32.DefWindowProcW +DefWindowProc.restype = ctypes.c_int +DefWindowProc.argtypes = [ctypes.wintypes.HWND, ctypes.c_uint, ctypes.wintypes.WPARAM, ctypes.wintypes.LPARAM] + +WS_OVERLAPPED = 0x00000000L +WS_POPUP = 0x80000000L +WS_CHILD = 0x40000000L +WS_MINIMIZE = 0x20000000L +WS_VISIBLE = 0x10000000L +WS_DISABLED = 0x08000000L +WS_CLIPSIBLINGS = 0x04000000L +WS_CLIPCHILDREN = 0x02000000L +WS_MAXIMIZE = 0x01000000L +WS_CAPTION = 0x00C00000L +WS_BORDER = 0x00800000L +WS_DLGFRAME = 0x00400000L +WS_VSCROLL = 0x00200000L +WS_HSCROLL = 0x00100000L +WS_SYSMENU = 0x00080000L +WS_THICKFRAME = 0x00040000L +WS_GROUP = 0x00020000L +WS_TABSTOP = 0x00010000L + +WS_MINIMIZEBOX = 0x00020000L +WS_MAXIMIZEBOX = 0x00010000L + +WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED | + WS_CAPTION | + WS_SYSMENU | + WS_THICKFRAME | + WS_MINIMIZEBOX | + WS_MAXIMIZEBOX) + +SM_XVIRTUALSCREEN = 76 +SM_YVIRTUALSCREEN = 77 +SM_CXVIRTUALSCREEN = 78 +SM_CYVIRTUALSCREEN = 79 +SM_CMONITORS = 80 +SM_SAMEDISPLAYFORMAT = 81 + +WM_NULL = 0x0000 +WM_CREATE = 0x0001 +WM_DESTROY = 0x0002 +WM_MOVE = 0x0003 +WM_SIZE = 0x0005 +WM_ACTIVATE = 0x0006 +WM_SETFOCUS = 0x0007 +WM_KILLFOCUS = 0x0008 +WM_ENABLE = 0x000A +WM_SETREDRAW = 0x000B +WM_SETTEXT = 0x000C +WM_GETTEXT = 0x000D +WM_GETTEXTLENGTH = 0x000E +WM_PAINT = 0x000F +WM_CLOSE = 0x0010 +WM_QUERYENDSESSION = 0x0011 +WM_QUIT = 0x0012 +WM_QUERYOPEN = 0x0013 +WM_ERASEBKGND = 0x0014 +WM_SYSCOLORCHANGE = 0x0015 +WM_ENDSESSION = 0x0016 +WM_SHOWWINDOW = 0x0018 +WM_CTLCOLOR = 0x0019 +WM_WININICHANGE = 0x001A +WM_SETTINGCHANGE = 0x001A +WM_DEVMODECHANGE = 0x001B +WM_ACTIVATEAPP = 0x001C +WM_FONTCHANGE = 0x001D +WM_TIMECHANGE = 0x001E +WM_CANCELMODE = 0x001F +WM_SETCURSOR = 0x0020 +WM_MOUSEACTIVATE = 0x0021 +WM_CHILDACTIVATE = 0x0022 +WM_QUEUESYNC = 0x0023 +WM_GETMINMAXINFO = 0x0024 +WM_PAINTICON = 0x0026 +WM_ICONERASEBKGND = 0x0027 +WM_NEXTDLGCTL = 0x0028 +WM_SPOOLERSTATUS = 0x002A +WM_DRAWITEM = 0x002B +WM_MEASUREITEM = 0x002C +WM_DELETEITEM = 0x002D +WM_VKEYTOITEM = 0x002E +WM_CHARTOITEM = 0x002F +WM_SETFONT = 0x0030 +WM_GETFONT = 0x0031 +WM_SETHOTKEY = 0x0032 
+WM_GETHOTKEY = 0x0033 +WM_QUERYDRAGICON = 0x0037 +WM_COMPAREITEM = 0x0039 +WM_GETOBJECT = 0x003D +WM_COMPACTING = 0x0041 +WM_COMMNOTIFY = 0x0044 +WM_WINDOWPOSCHANGING = 0x0046 +WM_WINDOWPOSCHANGED = 0x0047 +WM_POWER = 0x0048 +WM_COPYDATA = 0x004A +WM_CANCELJOURNAL = 0x004B +WM_NOTIFY = 0x004E +WM_INPUTLANGCHANGEREQUEST = 0x0050 +WM_INPUTLANGCHANGE = 0x0051 +WM_TCARD = 0x0052 +WM_HELP = 0x0053 +WM_USERCHANGED = 0x0054 +WM_NOTIFYFORMAT = 0x0055 +WM_CONTEXTMENU = 0x007B +WM_STYLECHANGING = 0x007C +WM_STYLECHANGED = 0x007D +WM_DISPLAYCHANGE = 0x007E +WM_GETICON = 0x007F +WM_SETICON = 0x0080 +WM_NCCREATE = 0x0081 +WM_NCDESTROY = 0x0082 +WM_NCCALCSIZE = 0x0083 +WM_NCHITTEST = 0x0084 +WM_NCPAINT = 0x0085 +WM_NCACTIVATE = 0x0086 +WM_GETDLGCODE = 0x0087 +WM_SYNCPAINT = 0x0088 +WM_NCMOUSEMOVE = 0x00A0 +WM_NCLBUTTONDOWN = 0x00A1 +WM_NCLBUTTONUP = 0x00A2 +WM_NCLBUTTONDBLCLK = 0x00A3 +WM_NCRBUTTONDOWN = 0x00A4 +WM_NCRBUTTONUP = 0x00A5 +WM_NCRBUTTONDBLCLK = 0x00A6 +WM_NCMBUTTONDOWN = 0x00A7 +WM_NCMBUTTONUP = 0x00A8 +WM_NCMBUTTONDBLCLK = 0x00A9 +WM_KEYDOWN = 0x0100 +WM_KEYUP = 0x0101 +WM_CHAR = 0x0102 +WM_DEADCHAR = 0x0103 +WM_SYSKEYDOWN = 0x0104 +WM_SYSKEYUP = 0x0105 +WM_SYSCHAR = 0x0106 +WM_SYSDEADCHAR = 0x0107 +WM_KEYLAST = 0x0108 +WM_IME_STARTCOMPOSITION = 0x010D +WM_IME_ENDCOMPOSITION = 0x010E +WM_IME_COMPOSITION = 0x010F +WM_IME_KEYLAST = 0x010F +WM_INITDIALOG = 0x0110 +WM_COMMAND = 0x0111 +WM_SYSCOMMAND = 0x0112 +WM_TIMER = 0x0113 +WM_HSCROLL = 0x0114 +WM_VSCROLL = 0x0115 +WM_INITMENU = 0x0116 +WM_INITMENUPOPUP = 0x0117 +WM_MENUSELECT = 0x011F +WM_MENUCHAR = 0x0120 +WM_ENTERIDLE = 0x0121 +WM_MENURBUTTONUP = 0x0122 +WM_MENUDRAG = 0x0123 +WM_MENUGETOBJECT = 0x0124 +WM_UNINITMENUPOPUP = 0x0125 +WM_MENUCOMMAND = 0x0126 +WM_CTLCOLORMSGBOX = 0x0132 +WM_CTLCOLOREDIT = 0x0133 +WM_CTLCOLORLISTBOX = 0x0134 +WM_CTLCOLORBTN = 0x0135 +WM_CTLCOLORDLG = 0x0136 +WM_CTLCOLORSCROLLBAR = 0x0137 +WM_CTLCOLORSTATIC = 0x0138 +WM_MOUSEMOVE = 0x0200 +WM_LBUTTONDOWN = 0x0201 +WM_LBUTTONUP = 0x0202 +WM_LBUTTONDBLCLK = 0x0203 +WM_RBUTTONDOWN = 0x0204 +WM_RBUTTONUP = 0x0205 +WM_RBUTTONDBLCLK = 0x0206 +WM_MBUTTONDOWN = 0x0207 +WM_MBUTTONUP = 0x0208 +WM_MBUTTONDBLCLK = 0x0209 +WM_MOUSEWHEEL = 0x020A +WM_PARENTNOTIFY = 0x0210 +WM_ENTERMENULOOP = 0x0211 +WM_EXITMENULOOP = 0x0212 +WM_NEXTMENU = 0x0213 +WM_SIZING = 0x0214 +WM_CAPTURECHANGED = 0x0215 +WM_MOVING = 0x0216 +WM_DEVICECHANGE = 0x0219 +WM_MDICREATE = 0x0220 +WM_MDIDESTROY = 0x0221 +WM_MDIACTIVATE = 0x0222 +WM_MDIRESTORE = 0x0223 +WM_MDINEXT = 0x0224 +WM_MDIMAXIMIZE = 0x0225 +WM_MDITILE = 0x0226 +WM_MDICASCADE = 0x0227 +WM_MDIICONARRANGE = 0x0228 +WM_MDIGETACTIVE = 0x0229 +WM_MDISETMENU = 0x0230 +WM_ENTERSIZEMOVE = 0x0231 +WM_EXITSIZEMOVE = 0x0232 +WM_DROPFILES = 0x0233 +WM_MDIREFRESHMENU = 0x0234 +WM_IME_SETCONTEXT = 0x0281 +WM_IME_NOTIFY = 0x0282 +WM_IME_CONTROL = 0x0283 +WM_IME_COMPOSITIONFULL = 0x0284 +WM_IME_SELECT = 0x0285 +WM_IME_CHAR = 0x0286 +WM_IME_REQUEST = 0x0288 +WM_IME_KEYDOWN = 0x0290 +WM_IME_KEYUP = 0x0291 +WM_MOUSEHOVER = 0x02A1 +WM_MOUSELEAVE = 0x02A3 +WM_CUT = 0x0300 +WM_COPY = 0x0301 +WM_PASTE = 0x0302 +WM_CLEAR = 0x0303 +WM_UNDO = 0x0304 +WM_RENDERFORMAT = 0x0305 +WM_RENDERALLFORMATS = 0x0306 +WM_DESTROYCLIPBOARD = 0x0307 +WM_DRAWCLIPBOARD = 0x0308 +WM_PAINTCLIPBOARD = 0x0309 +WM_VSCROLLCLIPBOARD = 0x030A +WM_SIZECLIPBOARD = 0x030B +WM_ASKCBFORMATNAME = 0x030C +WM_CHANGECBCHAIN = 0x030D +WM_HSCROLLCLIPBOARD = 0x030E +WM_QUERYNEWPALETTE = 0x030F +WM_PALETTEISCHANGING = 0x0310 +WM_PALETTECHANGED = 0x0311 +WM_HOTKEY = 0x0312 +WM_PRINT = 0x0317 
+WM_PRINTCLIENT = 0x0318 +WM_HANDHELDFIRST = 0x0358 +WM_HANDHELDLAST = 0x035F +WM_AFXFIRST = 0x0360 +WM_AFXLAST = 0x037F +WM_PENWINFIRST = 0x0380 +WM_PENWINLAST = 0x038F +WM_APP = 0x8000 +WM_USER = 0x0400 +WM_REFLECT = WM_USER + 0x1c00 + +class WNDCLASSEX(ctypes.Structure): + def __init__(self, *args, **kwargs): + super(WNDCLASSEX, self).__init__(*args, **kwargs) + self.cbSize = ctypes.sizeof(self) + _fields_ = [("cbSize", ctypes.c_uint), + ("style", ctypes.c_uint), + ("lpfnWndProc", WNDPROC), + ("cbClsExtra", ctypes.c_int), + ("cbWndExtra", ctypes.c_int), + ("hInstance", ctypes.wintypes.HANDLE), + ("hIcon", ctypes.wintypes.HANDLE), + ("hCursor", ctypes.wintypes.HANDLE), + ("hBrush", ctypes.wintypes.HANDLE), + ("lpszMenuName", ctypes.wintypes.LPCWSTR), + ("lpszClassName", ctypes.wintypes.LPCWSTR), + ("hIconSm", ctypes.wintypes.HANDLE)] + +ShowWindow = ctypes.windll.user32.ShowWindow +ShowWindow.argtypes = [ctypes.wintypes.HWND, ctypes.c_int] + +def GenerateDummyWindow(callback, uid): + newclass = WNDCLASSEX() + newclass.lpfnWndProc = callback + newclass.lpszClassName = uid.replace("-", "") + ATOM = ctypes.windll.user32.RegisterClassExW(ctypes.byref(newclass)) + hwnd = ctypes.windll.user32.CreateWindowExW(0, newclass.lpszClassName, None, WS_POPUP, 0, 0, 0, 0, 0, 0, 0, 0) + return hwnd + +# Message loop calls + +TIMERCALLBACK = ctypes.WINFUNCTYPE(None, + ctypes.wintypes.HWND, + ctypes.wintypes.UINT, + ctypes.POINTER(ctypes.wintypes.UINT), + ctypes.wintypes.DWORD) + +SetTimer = ctypes.windll.user32.SetTimer +SetTimer.restype = ctypes.POINTER(ctypes.wintypes.UINT) +SetTimer.argtypes = [ctypes.wintypes.HWND, + ctypes.POINTER(ctypes.wintypes.UINT), + ctypes.wintypes.UINT, + TIMERCALLBACK] + +KillTimer = ctypes.windll.user32.KillTimer +KillTimer.restype = ctypes.wintypes.BOOL +KillTimer.argtypes = [ctypes.wintypes.HWND, + ctypes.POINTER(ctypes.wintypes.UINT)] + +class MSG(ctypes.Structure): + _fields_ = [ ('HWND', ctypes.wintypes.HWND), + ('message', ctypes.wintypes.UINT), + ('wParam', ctypes.wintypes.WPARAM), + ('lParam', ctypes.wintypes.LPARAM), + ('time', ctypes.wintypes.DWORD), + ('pt', POINT)] + +GetMessage = ctypes.windll.user32.GetMessageW +GetMessage.restype = ctypes.wintypes.BOOL +GetMessage.argtypes = [ctypes.POINTER(MSG), ctypes.wintypes.HWND, ctypes.wintypes.UINT, ctypes.wintypes.UINT] + +TranslateMessage = ctypes.windll.user32.TranslateMessage +TranslateMessage.restype = ctypes.wintypes.ULONG +TranslateMessage.argtypes = [ctypes.POINTER(MSG)] + +DispatchMessage = ctypes.windll.user32.DispatchMessageW +DispatchMessage.restype = ctypes.wintypes.ULONG +DispatchMessage.argtypes = [ctypes.POINTER(MSG)] + +def LoadIcon(iconfilename, small=False): + return LoadImage(0, + unicode(iconfilename), + IMAGE_ICON, + 16 if small else 0, + 16 if small else 0, + LR_LOADFROMFILE) + + +class NotificationIcon(object): + def __init__(self, iconfilename, tooltip=None): + assert os.path.isfile(unicode(iconfilename)), "{} doesn't exist".format(iconfilename) + self._iconfile = unicode(iconfilename) + self._hicon = LoadIcon(self._iconfile, True) + assert self._hicon, "Failed to load {}".format(iconfilename) + #self._pumpqueue = Queue.Queue() + self._die = False + self._timerid = None + self._uid = uuid.uuid4() + self._tooltip = unicode(tooltip) if tooltip else u'' + #self._thread = threading.Thread(target=self._run) + #self._thread.start() + self._info_bubble = None + self.items = [] + + + def _bubble(self, iconinfo): + if self._info_bubble: + info_bubble = self._info_bubble + self._info_bubble = None + 
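# Show the queued bubble text once: use the local reference, the shared slot was just cleared +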
    message = unicode(info_bubble) + iconinfo.uFlags |= NIF_INFO + iconinfo.szInfo = message + iconinfo.szInfoTitle = message + iconinfo.dwInfoFlags = NIIF_INFO + iconinfo.union.uTimeout = 10000 + Shell_NotifyIcon(NIM_MODIFY, ctypes.pointer(iconinfo)) + + + def _run(self): + self.WM_TASKBARCREATED = ctypes.windll.user32.RegisterWindowMessageW(u'TaskbarCreated') + + self._windowproc = WNDPROC(self._callback) + self._hwnd = GenerateDummyWindow(self._windowproc, str(self._uid)) + + iconinfo = NOTIFYICONDATA() + iconinfo.hWnd = self._hwnd + iconinfo.uID = 100 + iconinfo.uFlags = NIF_ICON | NIF_SHOWTIP | NIF_MESSAGE | (NIF_TIP if self._tooltip else 0) + iconinfo.uCallbackMessage = WM_MENUCOMMAND + iconinfo.hIcon = self._hicon + iconinfo.szTip = self._tooltip + + Shell_NotifyIcon(NIM_ADD, ctypes.pointer(iconinfo)) + + self.iconinfo = iconinfo + + PostMessage(self._hwnd, WM_NULL, 0, 0) + + message = MSG() + last_time = -1 + ret = None + while not self._die: + try: + ret = GetMessage(ctypes.pointer(message), 0, 0, 0) + TranslateMessage(ctypes.pointer(message)) + DispatchMessage(ctypes.pointer(message)) + except Exception, err: + # print "NotificationIcon error", err, message + message = MSG() + time.sleep(0.125) + print "Icon thread stopped, removing icon..." + + Shell_NotifyIcon(NIM_DELETE, ctypes.cast(ctypes.pointer(iconinfo), ctypes.POINTER(NOTIFYICONDATA))) + ctypes.windll.user32.DestroyWindow(self._hwnd) + ctypes.windll.user32.DestroyIcon(self._hicon) + + + def _menu(self): + if not hasattr(self, 'items'): + return + + menu = CreatePopupMenu() + func = None + + try: + iidx = 1000 + defaultitem = -1 + item_map = {} + for fs in self.items: + iidx += 1 + if isinstance(fs, basestring): + if fs and not fs.strip('-_='): + AppendMenu(menu, MF_SEPARATOR, iidx, fs) + else: + AppendMenu(menu, MF_STRING | MF_GRAYED, iidx, fs) + elif isinstance(fs, tuple): + if callable(fs[0]): + itemstring = fs[0]() + else: + itemstring = unicode(fs[0]) + flags = MF_STRING + if itemstring.startswith("!"): + itemstring = itemstring[1:] + defaultitem = iidx + if itemstring.startswith("+"): + itemstring = itemstring[1:] + flags = flags | MF_CHECKED + itemcallable = fs[1] + item_map[iidx] = itemcallable + if itemcallable is False: + flags = flags | MF_DISABLED + elif not callable(itemcallable): + flags = flags | MF_GRAYED + AppendMenu(menu, flags, iidx, itemstring) + + if defaultitem != -1: + SetMenuDefaultItem(menu, defaultitem, 0) + + pos = POINT() + GetCursorPos(ctypes.pointer(pos)) + + PostMessage(self._hwnd, WM_NULL, 0, 0) + + SetForegroundWindow(self._hwnd) + + ti = TrackPopupMenu(menu, TPM_RIGHTBUTTON | TPM_RETURNCMD | TPM_NONOTIFY, pos.x, pos.y, 0, self._hwnd, None) + + if ti in item_map: + func = item_map[ti] + + PostMessage(self._hwnd, WM_NULL, 0, 0) + finally: + DestroyMenu(menu) + if func: func() + + + def clicked(self): + self._menu() + + + + def _callback(self, hWnd, msg, wParam, lParam): + import threading # not imported at module level; needed for the liveness check below + # Check if the main thread is still alive + if msg == WM_TIMER: + if not any(thread.getName() == 'MainThread' and thread.isAlive() + for thread in threading.enumerate()): + self._die = True + elif msg == WM_MENUCOMMAND and lParam == WM_LBUTTONUP: + self.clicked() + elif msg == WM_MENUCOMMAND and lParam == WM_RBUTTONUP: + self._menu() + elif msg == self.WM_TASKBARCREATED: # Explorer restarted, add the icon again.
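+ # Explorer broadcasts the registered TaskbarCreated message when it restarts; the icon must be re-added with NIM_ADD or it stays gone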
+ Shell_NotifyIcon(NIM_ADD, ctypes.pointer(self.iconinfo)) + else: + return DefWindowProc(hWnd, msg, wParam, lParam) + return 1 + + + def die(self): + self._die = True + PostMessage(self._hwnd, WM_NULL, 0, 0) + time.sleep(0.2) + try: + Shell_NotifyIcon(NIM_DELETE, self.iconinfo) + except Exception, err: + print "Icon remove error", err + ctypes.windll.user32.DestroyWindow(self._hwnd) + ctypes.windll.user32.DestroyIcon(self._hicon) + + + def pump(self): + try: + while not self._pumpqueue.empty(): + callable = self._pumpqueue.get(False) + callable() + except Queue.Empty: + pass + + + def announce(self, text): + self._info_bubble = text + + +def hideConsole(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0) + +def showConsole(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 1) + +def hasConsole(): + return ctypes.windll.kernel32.GetConsoleWindow() != 0 + +if __name__ == "__main__": + import time + + def greet(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 0) + print "Hello" + + def quit(): + ni._die = True + + def announce(): + ctypes.windll.user32.ShowWindow(ctypes.windll.kernel32.GetConsoleWindow(), 1) + ni.announce("Hello there") + + def clicked(): + ni.announce("Hello") + + def dynamicTitle(): + return "!The time is: %s" % time.time() + + ni = NotificationIcon(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../trayicon.ico'), "ZeroNet 0.2.9") + ni.items = [ + (dynamicTitle, False), + ('Hello', greet), + ('Title', False), + ('!Default', greet), + ('+Popup bubble', announce), + 'Nothing', + '--', + ('Quit', quit) + ] + ni.clicked = clicked + import atexit + + @atexit.register + def goodbye(): + print "You are now leaving the Python sector." + + ni._run() \ No newline at end of file diff --git a/plugins/Trayicon/lib/winfolders.py b/plugins/Trayicon/lib/winfolders.py new file mode 100644 index 00000000..d28efc1a --- /dev/null +++ b/plugins/Trayicon/lib/winfolders.py @@ -0,0 +1,53 @@ +''' Get windows special folders without pythonwin + Example: + import specialfolders + start_programs = specialfolders.get(specialfolders.PROGRAMS) + +Code is public domain, do with it what you will. 
+ +Luke Pinner - Environment.gov.au, 2010 February 10 +''' + +#Imports use _syntax to mask them from autocomplete IDE's +import ctypes as _ctypes +from ctypes.wintypes import HWND as _HWND, HANDLE as _HANDLE,DWORD as _DWORD,LPCWSTR as _LPCWSTR,MAX_PATH as _MAX_PATH, create_unicode_buffer as _cub +_SHGetFolderPath = _ctypes.windll.shell32.SHGetFolderPathW + +#public special folder constants +DESKTOP= 0 +PROGRAMS= 2 +MYDOCUMENTS= 5 +FAVORITES= 6 +STARTUP= 7 +RECENT= 8 +SENDTO= 9 +STARTMENU= 11 +MYMUSIC= 13 +MYVIDEOS= 14 +NETHOOD= 19 +FONTS= 20 +TEMPLATES= 21 +ALLUSERSSTARTMENU= 22 +ALLUSERSPROGRAMS= 23 +ALLUSERSSTARTUP= 24 +ALLUSERSDESKTOP= 25 +APPLICATIONDATA= 26 +PRINTHOOD= 27 +LOCALSETTINGSAPPLICATIONDATA= 28 +ALLUSERSFAVORITES= 31 +LOCALSETTINGSTEMPORARYINTERNETFILES=32 +COOKIES= 33 +LOCALSETTINGSHISTORY= 34 +ALLUSERSAPPLICATIONDATA= 35 + +def get(intFolder): + _SHGetFolderPath.argtypes = [_HWND, _ctypes.c_int, _HANDLE, _DWORD, _LPCWSTR] + auPathBuffer = _cub(_MAX_PATH) + exit_code = _SHGetFolderPath(0, intFolder, 0, 0, auPathBuffer) + return auPathBuffer.value + + +if __name__ == "__main__": + import os + print get(STARTUP) + open(get(STARTUP)+"\\zeronet.cmd", "w").write("cd /D %s\r\nzeronet.py" % os.getcwd()) \ No newline at end of file diff --git a/plugins/Trayicon/trayicon.ico b/plugins/Trayicon/trayicon.ico new file mode 100644 index 00000000..08617225 Binary files /dev/null and b/plugins/Trayicon/trayicon.ico differ diff --git a/plugins/Zeroname/SiteManagerPlugin.py b/plugins/Zeroname/SiteManagerPlugin.py new file mode 100644 index 00000000..2d1979d8 --- /dev/null +++ b/plugins/Zeroname/SiteManagerPlugin.py @@ -0,0 +1,83 @@ +import logging +import re +import time + +from Config import config +from Plugin import PluginManager + +allow_reload = False # No reload supported + +log = logging.getLogger("ZeronamePlugin") + + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + site_zeroname = None + db_domains = None + db_domains_modified = None + + def load(self, *args, **kwargs): + super(SiteManagerPlugin, self).load(*args, **kwargs) + if not self.get(config.bit_resolver): + self.need(config.bit_resolver) # Need ZeroName site + + # Checks if it's a valid address + def isAddress(self, address): + if self.isDomain(address): + return True + else: + return super(SiteManagerPlugin, self).isAddress(address) + + # Return: True if the address is a domain + def isDomain(self, address): + return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + + # Resolve domain + # Return: The address or None + def resolveDomain(self, domain): + domain = domain.lower() + if not self.site_zeroname: + self.site_zeroname = self.need(config.bit_resolver) + + site_zeroname_modified = self.site_zeroname.content_manager.contents.get("content.json", {}).get("modified", 0) + if not self.db_domains or self.db_domains_modified != site_zeroname_modified: + self.site_zeroname.needFile("data/names.json", priority=10) + s = time.time() + self.db_domains = self.site_zeroname.storage.loadJson("data/names.json") + log.debug( + "Domain db with %s entries loaded in %.3fs (modification: %s -> %s)" % + (len(self.db_domains), time.time() - s, self.db_domains_modified, site_zeroname_modified) + ) + self.db_domains_modified = site_zeroname_modified + return self.db_domains.get(domain) + + # Return or create site and start downloading site files + # Return: Site or None if dns resolve failed + def need(self, address, all_file=True): + if self.isDomain(address): # It looks like a domain + address_resolved =
self.resolveDomain(address) + if address_resolved: + address = address_resolved + else: + return None + + return super(SiteManagerPlugin, self).need(address, all_file) + + # Return: Site object or None if not found + def get(self, address): + if self.sites is None: # Not loaded yet + self.load() + if self.isDomain(address): # It looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: # Domain found + site = self.sites.get(address_resolved) + if site: + site_domain = site.settings.get("domain") + if site_domain != address: + site.settings["domain"] = address + else: # Domain not found + site = self.sites.get(address) + + else: # Access by site address + site = self.sites.get(address) + return site diff --git a/plugins/Zeroname/UiRequestPlugin.py b/plugins/Zeroname/UiRequestPlugin.py new file mode 100644 index 00000000..ea9bfcbe --- /dev/null +++ b/plugins/Zeroname/UiRequestPlugin.py @@ -0,0 +1,61 @@ +import re + +from Plugin import PluginManager + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + + def __init__(self, *args, **kwargs): + from Site import SiteManager + self.site_manager = SiteManager.site_manager + super(UiRequestPlugin, self).__init__(*args, **kwargs) + + # Media request + def actionSiteMedia(self, path, header_length=True): + match = re.match("/media/(?P
    <address>[A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", path) + if match: # It's a valid domain, resolve it first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/" + address + match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path, header_length=header_length) # Get the wrapper frame output + + # Is the media request allowed from that referer? + def isMediaRequestAllowed(self, site_address, referer): + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + referer_path = re.sub("\?.*", "", referer_path) # Remove http params + + if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]): # Different origin + return False + + if self.isProxyRequest(): # Match to site domain + referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access + match = re.match("http[s]{0,1}://(.*?)(/|$)", referer) + if match: + referer_site_address = match.group(1) + else: + referer_site_address = None + else: # Match to request path + match = re.match("/(?P
    <address>[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", referer_path) + if match: + referer_site_address = match.group("address") + else: + referer_site_address = None + + if not referer_site_address: + return False + elif referer_site_address == site_address: # Referer site address as simple address + return True + elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns + return True + else: # Invalid referer + return False + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("Zeroname plugin") + group.add_argument('--bit_resolver', help='ZeroNet site to resolve .bit domains', default="1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F", metavar="address") + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/Zeroname/__init__.py b/plugins/Zeroname/__init__.py new file mode 100644 index 00000000..889802db --- /dev/null +++ b/plugins/Zeroname/__init__.py @@ -0,0 +1,2 @@ +import UiRequestPlugin +import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/Zeroname/updater/zeroname_updater.py b/plugins/Zeroname/updater/zeroname_updater.py new file mode 100644 index 00000000..711ee1bc --- /dev/null +++ b/plugins/Zeroname/updater/zeroname_updater.py @@ -0,0 +1,210 @@ +import time +import json +import os +import sys +import re +import socket + +from subprocess import call +from bitcoinrpc.authproxy import AuthServiceProxy + + +def publish(): + print "* Signing and Publishing..." + call(" ".join(command_sign_publish), shell=True) + + +def processNameOp(domain, value, test=False): + if not value.strip().startswith("{"): + return False + try: + data = json.loads(value) + except Exception, err: + print "Json load error: %s" % err + return False + if "zeronet" not in data: + print "No zeronet in ", data.keys() + return False + if not isinstance(data["zeronet"], dict): + print "Not dict: ", data["zeronet"] + return False + if not re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", domain): + print "Invalid domain: ", domain + return False + + if test: + return True + + if "slave" in sys.argv: + print "Waiting for master update to arrive" + time.sleep(30) # Wait 30 sec to let the master updater run first + + # Note: Requires the file data/names.json to exist and contain "{}" to work + names_raw = open(names_path, "rb").read() + names = json.loads(names_raw) + for subdomain, address in data["zeronet"].items(): + subdomain = subdomain.lower() + address = re.sub("[^A-Za-z0-9]", "", address) + print subdomain, domain, "->", address + if subdomain: + if re.match("^[a-z0-9]([a-z0-9-]{0,62}[a-z0-9])?$", subdomain): + names["%s.%s.bit" % (subdomain, domain)] = address + else: + print "Invalid subdomain:", domain, subdomain + else: + names["%s.bit" % domain] = address + + new_names_raw = json.dumps(names, indent=2, sort_keys=True) + if new_names_raw != names_raw: + open(names_path, "wb").write(new_names_raw) + print "-", domain, "Changed" + return True + else: + print "-", domain, "Not changed" + return False + + +def processBlock(block_id, test=False): + print "Processing block #%s..."
% block_id + s = time.time() + block_hash = rpc.getblockhash(block_id) + block = rpc.getblock(block_hash) + + print "Checking %s tx" % len(block["tx"]) + updated = 0 + for tx in block["tx"]: + try: + transaction = rpc.getrawtransaction(tx, 1) + for vout in transaction.get("vout", []): + if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]: + name_op = vout["scriptPubKey"]["nameOp"] + updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"], test) + except Exception, err: + print "Error processing tx #%s %s" % (tx, err) + print "Done in %.3fs (updated %s)." % (time.time() - s, updated) + return updated + +# Connecting to RPC +def initRpc(config): + """Initialize Namecoin RPC""" + rpc_data = { + 'connect': '127.0.0.1', + 'port': '8336', + 'user': 'PLACEHOLDER', + 'password': 'PLACEHOLDER', + 'clienttimeout': '900' + } + try: + fptr = open(config, 'r') + lines = fptr.readlines() + fptr.close() + except: + return None # Or take some other appropriate action + + for line in lines: + if not line.startswith('rpc'): + continue + key_val = line.split(None, 1)[0] + (key, val) = key_val.split('=', 1) + if not key or not val: + continue + rpc_data[key[3:]] = val + + url = 'http://%(user)s:%(password)s@%(connect)s:%(port)s' % rpc_data + + return url, int(rpc_data['clienttimeout']) + +# Loading config... + +# Check whether the platform is Windows or Linux +# On Linux namecoin is installed under ~/.namecoin, while on Windows it is in %appdata%/Namecoin + +if sys.platform == "win32": + namecoin_location = os.getenv('APPDATA') + "/Namecoin/" +else: + namecoin_location = os.path.expanduser("~/.namecoin/") + +config_path = namecoin_location + 'zeroname_config.json' +if not os.path.isfile(config_path): # Create sample config + open(config_path, "w").write( + json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet', 'privatekey': '', 'lastprocessed': 223910}, indent=2) + ) + print "* Example config written to %s" % config_path + sys.exit(0) + +config = json.load(open(config_path)) +names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"]) +os.chdir(config["zeronet_path"]) # Change working dir - tells script where Zeronet install is. + +# Parameters to sign and publish +command_sign_publish = [sys.executable, "zeronet.py", "siteSign", config["site"], config["privatekey"], "--publish"] +if sys.platform == 'win32': + command_sign_publish = ['"%s"' % param for param in command_sign_publish] + +# Initialize rpc connection +rpc_auth, rpc_timeout = initRpc(namecoin_location + "namecoin.conf") +rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) + +while 1: + try: + time.sleep(1) + last_block = int(rpc.getinfo()["blocks"]) + break # Connection succeeded + except socket.timeout: # Timeout + print ".", + sys.stdout.flush() + except Exception, err: + print "Exception", err.__class__, err + time.sleep(5) + rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) + +if not config["lastprocessed"]: # First startup: Start processing from last block + config["lastprocessed"] = last_block + + +print "- Testing domain parsing..."
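+# The heights below are fixture blocks on the Namecoin chain whose name_ops exercise each validation rule; test=True only parses, it never writes names.json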
+assert processBlock(223911, test=True) # Testing zeronetwork.bit +assert processBlock(227052, test=True) # Testing brainwallets.bit +assert not processBlock(236824, test=True) # Utf8 domain name (invalid should skip) +assert not processBlock(236752, test=True) # Uppercase domain (invalid should skip) +assert processBlock(236870, test=True) # Encoded domain (should pass) +# sys.exit(0) + +print "- Parsing skipped blocks..." +should_publish = False +for block_id in range(config["lastprocessed"], last_block + 1): + if processBlock(block_id): + should_publish = True +config["lastprocessed"] = last_block + +if should_publish: + publish() + +while 1: + print "- Waiting for new block" + sys.stdout.flush() + while 1: + try: + time.sleep(1) + rpc.waitforblock() + print "Found" + break # Block found + except socket.timeout: # Timeout + print ".", + sys.stdout.flush() + except Exception, err: + print "Exception", err.__class__, err + time.sleep(5) + rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) + + last_block = int(rpc.getinfo()["blocks"]) + should_publish = False + for block_id in range(config["lastprocessed"] + 1, last_block + 1): + if processBlock(block_id): + should_publish = True + + config["lastprocessed"] = last_block + open(config_path, "w").write(json.dumps(config, indent=2)) + + if should_publish: + publish() diff --git a/plugins/disabled-Bootstrapper/BootstrapperDb.py b/plugins/disabled-Bootstrapper/BootstrapperDb.py new file mode 100644 index 00000000..a3a91589 --- /dev/null +++ b/plugins/disabled-Bootstrapper/BootstrapperDb.py @@ -0,0 +1,157 @@ +import time +import re + +import gevent + +from Config import config +from Db import Db +from util import helper + + +class BootstrapperDb(Db): + def __init__(self): + self.version = 6 + self.hash_ids = {} # hash -> id cache + super(BootstrapperDb, self).__init__({"db_name": "Bootstrapper"}, "%s/bootstrapper.db" % config.data_dir) + self.foreign_keys = True + self.checkTables() + self.updateHashCache() + gevent.spawn(self.cleanup) + + def cleanup(self): + while 1: + self.execute("DELETE FROM peer WHERE date_announced < DATETIME('now', '-40 minute')") + time.sleep(4*60) + + def updateHashCache(self): + res = self.execute("SELECT * FROM hash") + self.hash_ids = {str(row["hash"]): row["hash_id"] for row in res} + self.log.debug("Loaded %s hash_ids" % len(self.hash_ids)) + + def checkTables(self): + version = int(self.execute("PRAGMA user_version").fetchone()[0]) + self.log.debug("Db version: %s, needed: %s" % (version, self.version)) + if version < self.version: + self.createTables() + else: + self.execute("VACUUM") + + def createTables(self): + # Delete all tables + self.execute("PRAGMA writable_schema = 1") + self.execute("DELETE FROM sqlite_master WHERE type IN ('table', 'index', 'trigger')") + self.execute("PRAGMA writable_schema = 0") + self.execute("VACUUM") + self.execute("PRAGMA INTEGRITY_CHECK") + # Create new tables + self.execute(""" + CREATE TABLE peer ( + peer_id INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE, + port INTEGER NOT NULL, + ip4 TEXT, + onion TEXT, + date_added DATETIME DEFAULT (CURRENT_TIMESTAMP), + date_announced DATETIME DEFAULT (CURRENT_TIMESTAMP) + ); + """) + + self.execute(""" + CREATE TABLE peer_to_hash ( + peer_to_hash_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE NOT NULL, + peer_id INTEGER REFERENCES peer (peer_id) ON DELETE CASCADE, + hash_id INTEGER REFERENCES hash (hash_id) + ); + """) + self.execute("CREATE INDEX peer_id ON peer_to_hash (peer_id);") + self.execute("CREATE INDEX hash_id ON 
peer_to_hash (hash_id);") + + self.execute(""" + CREATE TABLE hash ( + hash_id INTEGER PRIMARY KEY AUTOINCREMENT UNIQUE NOT NULL, + hash BLOB UNIQUE NOT NULL, + date_added DATETIME DEFAULT (CURRENT_TIMESTAMP) + ); + """) + self.execute("PRAGMA user_version = %s" % self.version) + + def getHashId(self, hash): + if hash not in self.hash_ids: + self.log.debug("New hash: %s" % repr(hash)) + self.execute("INSERT OR IGNORE INTO hash ?", {"hash": buffer(hash)}) + self.hash_ids[hash] = self.cur.cursor.lastrowid + return self.hash_ids[hash] + + def peerAnnounce(self, ip4=None, onion=None, port=None, hashes=[], onion_signed=False, delete_missing_hashes=False): + hashes_ids_announced = [] + for hash in hashes: + hashes_ids_announced.append(self.getHashId(hash)) + + if not ip4 and not onion: + return 0 + + # Check user + if onion: + res = self.execute("SELECT * FROM peer WHERE ? LIMIT 1", {"onion": onion}) + else: + res = self.execute("SELECT * FROM peer WHERE ? LIMIT 1", {"ip4": ip4, "port": port}) + + user_row = res.fetchone() + if user_row: + peer_id = user_row["peer_id"] + self.execute("UPDATE peer SET date_announced = DATETIME('now') WHERE ?", {"peer_id": peer_id}) + else: + self.log.debug("New peer: %s %s signed: %s" % (ip4, onion, onion_signed)) + if onion and not onion_signed: + return len(hashes) + self.execute("INSERT INTO peer ?", {"ip4": ip4, "onion": onion, "port": port}) + peer_id = self.cur.cursor.lastrowid + + # Check user's hashes + res = self.execute("SELECT * FROM peer_to_hash WHERE ?", {"peer_id": peer_id}) + hash_ids_db = [row["hash_id"] for row in res] + if hash_ids_db != hashes_ids_announced: + hash_ids_added = set(hashes_ids_announced) - set(hash_ids_db) + hash_ids_removed = set(hash_ids_db) - set(hashes_ids_announced) + if not onion or onion_signed: + for hash_id in hash_ids_added: + self.execute("INSERT INTO peer_to_hash ?", {"peer_id": peer_id, "hash_id": hash_id}) + if hash_ids_removed and delete_missing_hashes: + self.execute("DELETE FROM peer_to_hash WHERE ?", {"peer_id": peer_id, "hash_id": list(hash_ids_removed)}) + + return len(hash_ids_added) + len(hash_ids_removed) + else: + return 0 + + def peerList(self, hash, ip4=None, onions=[], port=None, limit=30, need_types=["ip4", "onion"]): + hash_peers = {"ip4": [], "onion": []} + if limit == 0: + return hash_peers + hashid = self.getHashId(hash) + + where = "hash_id = :hashid" + if onions: + onions_escaped = ["'%s'" % re.sub("[^a-z0-9,]", "", onion) for onion in onions if type(onion) is str] + where += " AND (onion NOT IN (%s) OR onion IS NULL)" % ",".join(onions_escaped) + elif ip4: + where += " AND (NOT (ip4 = :ip4 AND port = :port) OR ip4 IS NULL)" + + query = """ + SELECT ip4, port, onion + FROM peer_to_hash + LEFT JOIN peer USING (peer_id) + WHERE %s + ORDER BY date_announced DESC + LIMIT :limit + """ % where + res = self.execute(query, {"hashid": hashid, "ip4": ip4, "onions": onions, "port": port, "limit": limit}) + + for row in res: + if row["ip4"] and "ip4" in need_types: + hash_peers["ip4"].append( + helper.packAddress(row["ip4"], row["port"]) + ) + if row["onion"] and "onion" in need_types: + hash_peers["onion"].append( + helper.packOnionAddress(row["onion"], row["port"]) + ) + return hash_peers diff --git a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py new file mode 100644 index 00000000..7577794c --- /dev/null +++ b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py @@ -0,0 +1,106 @@ +import time + +from Plugin import PluginManager +from 
    BootstrapperDb import BootstrapperDb +from Crypt import CryptRsa + +if "db" not in locals().keys(): # Share the db between reloads + db = BootstrapperDb() + + +@PluginManager.registerTo("FileRequest") +class FileRequestPlugin(object): + def actionAnnounce(self, params): + hashes = params["hashes"] + + if "onion_signs" in params and len(params["onion_signs"]) == len(set(params["onions"])): + # Check if all signs are correct + if time.time() - float(params["onion_sign_this"]) < 3*60: # Peer has 3 minutes to sign the message + onions_signed = [] + # Check onion signs + for onion_publickey, onion_sign in params["onion_signs"].items(): + if CryptRsa.verify(params["onion_sign_this"], onion_publickey, onion_sign): + onions_signed.append(CryptRsa.publickeyToOnion(onion_publickey)) + else: + break + # Check if the same onion addresses signed as the announced ones + if sorted(onions_signed) == sorted(set(params["onions"])): + all_onions_signed = True + else: + all_onions_signed = False + else: + # The onion_sign_this nonce is older than 3 minutes + all_onions_signed = False + else: + # Incorrect number of signs + all_onions_signed = False + + if "ip4" in params["add"] and self.connection.ip != "127.0.0.1" and not self.connection.ip.endswith(".onion"): + ip4 = self.connection.ip + else: + ip4 = None + + # Separately add onions to sites, or all at once if no onions present + hashes_changed = 0 + i = 0 + for onion in params.get("onions", []): + hashes_changed += db.peerAnnounce( + onion=onion, + port=params["port"], + hashes=[hashes[i]], + onion_signed=all_onions_signed + ) + i += 1 + + # Announce all sites if ip4 defined + if ip4: + hashes_changed += db.peerAnnounce( + ip4=ip4, + port=params["port"], + hashes=hashes, + delete_missing_hashes=params.get("delete") + ) + + # Query sites + back = {} + peers = [] + if params.get("onions") and not all_onions_signed and hashes_changed: + back["onion_sign_this"] = "%.0f" % time.time() # Send back nonce for signing + + for hash in hashes: + hash_peers = db.peerList( + hash, + ip4=self.connection.ip, onions=params.get("onions"), port=params["port"], + limit=min(30, params["need_num"]), need_types=params["need_types"] + ) + peers.append(hash_peers) + + back["peers"] = peers + self.response(back) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def actionStatsBootstrapper(self): + self.sendHeader() + + # Style + yield """ + <style> + * { font-family: monospace; white-space: pre } + </style> + """ + + hash_rows = db.execute("SELECT * FROM hash").fetchall() + for hash_row in hash_rows: + peer_rows = db.execute( + "SELECT * FROM peer LEFT JOIN peer_to_hash USING (peer_id) WHERE hash_id = :hash_id", + {"hash_id": hash_row["hash_id"]} + ).fetchall() + + yield "
    <br>%s (added: %s, peers: %s)<br>
    " % ( + str(hash_row["hash"]).encode("hex"), hash_row["date_added"], len(peer_rows) + ) + for peer_row in peer_rows: + yield " - {ip4: <30} {onion: <30} added: {date_added}, announced: {date_announced}
    ".format(**dict(peer_row)) diff --git a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py new file mode 100644 index 00000000..a79c052f --- /dev/null +++ b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py @@ -0,0 +1,185 @@ +import hashlib +import os + +import pytest + +from Bootstrapper import BootstrapperPlugin +from Bootstrapper.BootstrapperDb import BootstrapperDb +from Peer import Peer +from Crypt import CryptRsa +from util import helper + + +@pytest.fixture() +def bootstrapper_db(request): + BootstrapperPlugin.db.close() + BootstrapperPlugin.db = BootstrapperDb() + BootstrapperPlugin.db.createTables() # Reset db + BootstrapperPlugin.db.cur.logging = True + + def cleanup(): + BootstrapperPlugin.db.close() + os.unlink(BootstrapperPlugin.db.db_path) + + request.addfinalizer(cleanup) + return BootstrapperPlugin.db + + +@pytest.mark.usefixtures("resetSettings") +class TestBootstrapper: + def testIp4(self, file_server, bootstrapper_db): + peer = Peer("127.0.0.1", 1544, connection_server=file_server) + hash1 = hashlib.sha256("site1").digest() + hash2 = hashlib.sha256("site2").digest() + hash3 = hashlib.sha256("site3").digest() + + # Verify empty result + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + }) + + assert len(res["peers"][0]["ip4"]) == 0 # Empty result + + # Verify added peer on previous request + bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1, hash2], delete_missing_hashes=True) + + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + }) + assert len(res["peers"][0]["ip4"]) == 1 + assert len(res["peers"][1]["ip4"]) == 1 + + # hash2 deleted from 1.2.3.4 + bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1], delete_missing_hashes=True) + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + }) + assert len(res["peers"][0]["ip4"]) == 1 + assert len(res["peers"][1]["ip4"]) == 0 + + # Announce 3 hash again + bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1, hash2, hash3], delete_missing_hashes=True) + res = peer.request("announce", { + "hashes": [hash1, hash2, hash3], + "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + }) + assert len(res["peers"][0]["ip4"]) == 1 + assert len(res["peers"][1]["ip4"]) == 1 + assert len(res["peers"][2]["ip4"]) == 1 + + # Single hash announce + res = peer.request("announce", { + "hashes": [hash1], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + }) + assert len(res["peers"][0]["ip4"]) == 1 + + # Test DB cleanup + assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 1 # 127.0.0.1 never get added to db + + # Delete peers + bootstrapper_db.execute("DELETE FROM peer WHERE ip4 = '1.2.3.4'") + assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer_to_hash").fetchone()["num"] == 0 + + assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM hash").fetchone()["num"] == 3 # 3 sites + assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 0 # 0 peer + + def testPassive(self, file_server, bootstrapper_db): + peer = Peer("127.0.0.1", 1544, connection_server=file_server) + hash1 = hashlib.sha256("hash1").digest() + + bootstrapper_db.peerAnnounce(ip4=None, port=15441, 
hashes=[hash1]) + res = peer.request("announce", { + "hashes": [hash1], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": [] + }) + + assert len(res["peers"][0]["ip4"]) == 0 # Empty result + + def testAddOnion(self, file_server, site, bootstrapper_db, tor_manager): + onion1 = tor_manager.addOnion() + onion2 = tor_manager.addOnion() + peer = Peer("127.0.0.1", 1544, connection_server=file_server) + hash1 = hashlib.sha256("site1").digest() + hash2 = hashlib.sha256("site2").digest() + hash3 = hashlib.sha256("site3").digest() + + bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=1234, hashes=[hash1, hash2, hash3]) + res = peer.request("announce", { + "onions": [onion1, onion1, onion2], + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert len(res["peers"][0]["ip4"]) == 1 + + # Onion address not added yet + site_peers = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers["onion"]) == 0 + assert "onion_sign_this" in res + + # Sign the nonces + sign1 = CryptRsa.sign(res["onion_sign_this"], tor_manager.getPrivatekey(onion1)) + sign2 = CryptRsa.sign(res["onion_sign_this"], tor_manager.getPrivatekey(onion2)) + + # Bad sign (different address) + res = peer.request("announce", { + "onions": [onion1], "onion_sign_this": res["onion_sign_this"], + "onion_signs": {tor_manager.getPublickey(onion2): sign2}, + "hashes": [hash1], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert "onion_sign_this" in res + site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers1["onion"]) == 0 # Not added + + # Bad sign (missing one) + res = peer.request("announce", { + "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], + "onion_signs": {tor_manager.getPublickey(onion1): sign1}, + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert "onion_sign_this" in res + site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers1["onion"]) == 0 # Not added + + # Good sign + res = peer.request("announce", { + "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], + "onion_signs": {tor_manager.getPublickey(onion1): sign1, tor_manager.getPublickey(onion2): sign2}, + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + }) + assert "onion_sign_this" not in res + + # Onion addresses added + site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + assert len(site_peers1["onion"]) == 1 + site_peers2 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash2) + assert len(site_peers2["onion"]) == 1 + site_peers3 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash3) + assert len(site_peers3["onion"]) == 1 + + assert site_peers1["onion"][0] == site_peers2["onion"][0] + assert site_peers2["onion"][0] != site_peers3["onion"][0] + assert helper.unpackOnionAddress(site_peers1["onion"][0])[0] == onion1+".onion" + assert helper.unpackOnionAddress(site_peers2["onion"][0])[0] == onion1+".onion" + assert helper.unpackOnionAddress(site_peers3["onion"][0])[0] == onion2+".onion" + + tor_manager.delOnion(onion1) + tor_manager.delOnion(onion2) + + + def testRequestPeers(self, file_server, site, bootstrapper_db, tor_manager): + site.connection_server = file_server + 
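# Wire the test site to the in-process file server (its tor_manager is attached next) so announceTracker talks to the local Bootstrapper +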
site.connection_server.tor_manager = tor_manager + hash = hashlib.sha256(site.address).digest() + + # Request peers from tracker + assert len(site.peers) == 0 + bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=1234, hashes=[hash]) + site.announceTracker("zero", "127.0.0.1:1544") + assert len(site.peers) == 1 + + # Test onion address store + bootstrapper_db.peerAnnounce(onion="bka4ht2bzxchy44r", port=1234, hashes=[hash], onion_signed=True) + site.announceTracker("zero", "127.0.0.1:1544") + assert len(site.peers) == 2 + assert "bka4ht2bzxchy44r.onion:1234" in site.peers diff --git a/plugins/disabled-Bootstrapper/Test/conftest.py b/plugins/disabled-Bootstrapper/Test/conftest.py new file mode 100644 index 00000000..8c1df5b2 --- /dev/null +++ b/plugins/disabled-Bootstrapper/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * \ No newline at end of file diff --git a/plugins/disabled-Bootstrapper/Test/pytest.ini b/plugins/disabled-Bootstrapper/Test/pytest.ini new file mode 100644 index 00000000..d09210d1 --- /dev/null +++ b/plugins/disabled-Bootstrapper/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/disabled-Bootstrapper/__init__.py b/plugins/disabled-Bootstrapper/__init__.py new file mode 100644 index 00000000..ca533eac --- /dev/null +++ b/plugins/disabled-Bootstrapper/__init__.py @@ -0,0 +1 @@ +import BootstrapperPlugin \ No newline at end of file diff --git a/plugins/disabled-Dnschain/SiteManagerPlugin.py b/plugins/disabled-Dnschain/SiteManagerPlugin.py new file mode 100644 index 00000000..9121b425 --- /dev/null +++ b/plugins/disabled-Dnschain/SiteManagerPlugin.py @@ -0,0 +1,153 @@ +import logging, json, os, re, sys, time +import gevent +from Plugin import PluginManager +from Config import config +from util import Http +from Debug import Debug + +allow_reload = False # No reload supported + +log = logging.getLogger("DnschainPlugin") + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + dns_cache_path = "%s/dns_cache.json" % config.data_dir + dns_cache = None + + # Checks if its a valid address + def isAddress(self, address): + if self.isDomain(address): + return True + else: + return super(SiteManagerPlugin, self).isAddress(address) + + + # Return: True if the address is domain + def isDomain(self, address): + return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + + + # Load dns entries from data/dns_cache.json + def loadDnsCache(self): + if os.path.isfile(self.dns_cache_path): + self.dns_cache = json.load(open(self.dns_cache_path)) + else: + self.dns_cache = {} + log.debug("Loaded dns cache, entries: %s" % len(self.dns_cache)) + + + # Save dns entries to data/dns_cache.json + def saveDnsCache(self): + json.dump(self.dns_cache, open(self.dns_cache_path, "wb"), indent=2) + + + # Resolve domain using dnschain.net + # Return: The address or None + def resolveDomainDnschainNet(self, domain): + try: + match = self.isDomain(domain) + sub_domain = match.group(1).strip(".") + top_domain = match.group(2) + if not sub_domain: sub_domain = "@" + address = None + with gevent.Timeout(5, Exception("Timeout: 5s")): + res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read() + data = json.loads(res)["data"]["value"] + if "zeronet" in data: + for key, val in data["zeronet"].iteritems(): + self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours + 
self.saveDnsCache() + return data["zeronet"].get(sub_domain) + # Not found + return address + except Exception, err: + log.debug("Dnschain.net %s resolve error: %s" % (domain, Debug.formatException(err))) + + + # Resolve domain using dnschain.info + # Return: The address or None + def resolveDomainDnschainInfo(self, domain): + try: + match = self.isDomain(domain) + sub_domain = match.group(1).strip(".") + top_domain = match.group(2) + if not sub_domain: sub_domain = "@" + address = None + with gevent.Timeout(5, Exception("Timeout: 5s")): + res = Http.get("https://dnschain.info/bit/d/%s" % re.sub("\.bit$", "", top_domain)).read() + data = json.loads(res)["value"] + for key, val in data["zeronet"].iteritems(): + self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours + self.saveDnsCache() + return data["zeronet"].get(sub_domain) + # Not found + return address + except Exception, err: + log.debug("Dnschain.info %s resolve error: %s" % (domain, Debug.formatException(err))) + + + # Resolve domain + # Return: The address or None + def resolveDomain(self, domain): + domain = domain.lower() + if self.dns_cache == None: + self.loadDnsCache() + if domain.count(".") < 2: # Its a topleved request, prepend @. to it + domain = "@."+domain + + domain_details = self.dns_cache.get(domain) + if domain_details and time.time() < domain_details[1]: # Found in cache and its not expired + return domain_details[0] + else: + # Resovle dns using dnschain + thread_dnschain_info = gevent.spawn(self.resolveDomainDnschainInfo, domain) + thread_dnschain_net = gevent.spawn(self.resolveDomainDnschainNet, domain) + gevent.joinall([thread_dnschain_net, thread_dnschain_info]) # Wait for finish + + if thread_dnschain_info.value and thread_dnschain_net.value: # Booth successfull + if thread_dnschain_info.value == thread_dnschain_net.value: # Same returned value + return thread_dnschain_info.value + else: + log.error("Dns %s missmatch: %s != %s" % (domain, thread_dnschain_info.value, thread_dnschain_net.value)) + + # Problem during resolve + if domain_details: # Resolve failed, but we have it in the cache + domain_details[1] = time.time()+60*60 # Dont try again for 1 hour + return domain_details[0] + else: # Not found in cache + self.dns_cache[domain] = [None, time.time()+60] # Don't check again for 1 min + return None + + + # Return or create site and start download site files + # Return: Site or None if dns resolve failed + def need(self, address, all_file=True): + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: + address = address_resolved + else: + return None + + return super(SiteManagerPlugin, self).need(address, all_file) + + + # Return: Site object or None if not found + def get(self, address): + if self.sites == None: # Not loaded yet + self.load() + if self.isDomain(address): # Its looks like a domain + address_resolved = self.resolveDomain(address) + if address_resolved: # Domain found + site = self.sites.get(address_resolved) + if site: + site_domain = site.settings.get("domain") + if site_domain != address: + site.settings["domain"] = address + else: # Domain not found + site = self.sites.get(address) + + else: # Access by site address + site = self.sites.get(address) + return site + diff --git a/plugins/disabled-Dnschain/UiRequestPlugin.py b/plugins/disabled-Dnschain/UiRequestPlugin.py new file mode 100644 index 00000000..e1a095a5 --- /dev/null +++ b/plugins/disabled-Dnschain/UiRequestPlugin.py @@ -0,0 +1,34 @@ 
+import re
+from Plugin import PluginManager
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+    def __init__(self, server=None):
+        from Site import SiteManager
+        self.site_manager = SiteManager.site_manager
+        super(UiRequestPlugin, self).__init__(server)
+
+
+    # Media request
+    def actionSiteMedia(self, path):
+        match = re.match("/media/(?P<address>[A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", path)
+        if match:  # It's a valid domain, resolve first
+            domain = match.group("address")
+            address = self.site_manager.resolveDomain(domain)
+            if address:
+                path = "/media/" + address + match.group("inner_path")
+        return super(UiRequestPlugin, self).actionSiteMedia(path)  # Get the wrapper frame output
+
+
+    # Is media request allowed from that referer
+    def isMediaRequestAllowed(self, site_address, referer):
+        referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "")  # Remove site address
+        referer_site_address = re.match("/(?P<address>[A-Za-z0-9\.-]+)(?P<inner_path>/.*|$)", referer_path).group("address")
+
+        if referer_site_address == site_address:  # Referer site address as simple address
+            return True
+        elif self.site_manager.resolveDomain(referer_site_address) == site_address:  # Referer site address as dns name
+            return True
+        else:  # Invalid referer
+            return False
+
diff --git a/plugins/disabled-Dnschain/__init__.py b/plugins/disabled-Dnschain/__init__.py
new file mode 100644
index 00000000..2b36af5d
--- /dev/null
+++ b/plugins/disabled-Dnschain/__init__.py
@@ -0,0 +1,3 @@
+# This plugin is experimental; if you really want to enable it, uncomment the following lines:
+# import DnschainPlugin
+# import SiteManagerPlugin
\ No newline at end of file
diff --git a/plugins/disabled-DonationMessage/DonationMessagePlugin.py b/plugins/disabled-DonationMessage/DonationMessagePlugin.py
new file mode 100644
index 00000000..14f95b08
--- /dev/null
+++ b/plugins/disabled-DonationMessage/DonationMessagePlugin.py
@@ -0,0 +1,22 @@
+import re
+from Plugin import PluginManager
+
+# Warning: If you modify the donation address, rename the plugin's directory to "MyDonationMessage" to prevent the update script from overwriting it
+
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+    # Inject a donation message into every page's top right corner
+    def renderWrapper(self, *args, **kwargs):
+        body = super(UiRequestPlugin, self).renderWrapper(*args, **kwargs)  # Get the wrapper frame output
+
+        inject_html = """
+            Please donate to help to keep this ZeroProxy alive
+            </body>
+            </html>
+        """
+
+        return re.sub("</body>\s*</html>\s*$", inject_html, body)
diff --git a/plugins/disabled-DonationMessage/__init__.py b/plugins/disabled-DonationMessage/__init__.py
new file mode 100644
index 00000000..f8dcae2f
--- /dev/null
+++ b/plugins/disabled-DonationMessage/__init__.py
@@ -0,0 +1 @@
+import DonationMessagePlugin
diff --git a/plugins/disabled-Multiuser/MultiuserPlugin.py b/plugins/disabled-Multiuser/MultiuserPlugin.py
new file mode 100644
index 00000000..390cb686
--- /dev/null
+++ b/plugins/disabled-Multiuser/MultiuserPlugin.py
@@ -0,0 +1,201 @@
+import re
+import sys
+import json
+
+from Config import config
+from Plugin import PluginManager
+from Crypt import CryptBitcoin
+import UserPlugin
+
+try:
+    local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys())  # Users in users.json
+except Exception, err:
+    local_master_addresses = set()
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+    def __init__(self, *args, **kwargs):
+        self.user_manager = sys.modules["User.UserManager"].user_manager
+        super(UiRequestPlugin, self).__init__(*args, **kwargs)
+
+    # Create a new user and inject the user welcome message if necessary
+    # Return: Html body also containing the injection
+    def actionWrapper(self, path, extra_headers=None):
+
+        match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
+        if not match:
+            return False
+
+        inner_path = match.group("inner_path").lstrip("/")
+        html_request = "." not in inner_path or inner_path.endswith(".html")  # Only inject into html requests
+
+        user_created = False
+        if html_request:
+            user = self.getCurrentUser()  # Get user from cookie
+            if not user:  # No user found by cookie
+                user = self.user_manager.create()
+                user_created = True
+        else:
+            user = None
+
+        # Disable new site creation if --multiuser_no_new_sites enabled
+        if config.multiuser_no_new_sites:
+            path_parts = self.parsePath(path)
+            if not self.server.site_manager.get(match.group("address")) and (not user or user.master_address not in local_master_addresses):
+                self.sendHeader(404)
+                return self.formatError("Not Found", "Adding new sites disabled on this proxy", details=False)
+
+        if user_created:
+            if not extra_headers:
+                extra_headers = []
+            extra_headers.append(('Set-Cookie', "master_address=%s;path=/;max-age=2592000;" % user.master_address))  # = 30 days
+
+        loggedin = self.get.get("login") == "done"
+
+        back_generator = super(UiRequestPlugin, self).actionWrapper(path, extra_headers)  # Get the wrapper frame output
+
+        if not back_generator:  # Wrapper error or not a string returned, injection not possible
+            return False
+
+        if user_created:
+            back = back_generator.next()
+            master_seed = user.master_seed
+            # Inject the welcome message
+            inject_html = """
+                {master_seed}
+                </body>
+                </html>
+            """.replace("\t", "")
+            inject_html = inject_html.replace("{master_seed}", master_seed)  # Set the master seed in the message
+
+            return iter([re.sub("</body>\s*</html>\s*$", inject_html, back)])  # Replace the </body></html> tags with the injection
+
+        elif loggedin:
+            back = back_generator.next()
+            inject_html = """
+                {message}
+                </body>
+                </html>
+            """.replace("\t", "")
+            if user.master_address in local_master_addresses:
+                message = "Hello master!"
+            else:
+                message = "Hello again!"
+            inject_html = inject_html.replace("{message}", message)
+            return iter([re.sub("</body>\s*</html>\s*$", inject_html, back)])  # Replace the </body></html> tags with the injection
+
+        else:  # No injection necessary
+            return back_generator
+
+    # Get the current user based on the request's cookies
+    # Return: User object or None if no match
+    def getCurrentUser(self):
+        cookies = self.getCookies()
+        user = None
+        if "master_address" in cookies:
+            users = self.user_manager.list()
+            user = users.get(cookies["master_address"])
+        return user
+
+
+@PluginManager.registerTo("UiWebsocket")
+class UiWebsocketPlugin(object):
+    def __init__(self, *args, **kwargs):
+        self.multiuser_denied_cmds = (
+            "siteDelete", "configSet", "serverShutdown", "serverUpdate", "siteClone",
+            "siteSetOwned", "optionalLimitSet", "siteSetAutodownloadoptional", "dbReload", "dbRebuild",
+            "mergerSiteDelete", "siteSetLimit",
+            "muteAdd", "muteRemove"
+        )
+        if config.multiuser_no_new_sites:
+            self.multiuser_denied_cmds += ("MergerSiteAdd", )
+
+        super(UiWebsocketPlugin, self).__init__(*args, **kwargs)
+
+    # Let the page know we are running in multiuser mode
+    def formatServerInfo(self):
+        server_info = super(UiWebsocketPlugin, self).formatServerInfo()
+        server_info["multiuser"] = True
+        if "ADMIN" in self.site.settings["permissions"]:
+            server_info["master_address"] = self.user.master_address
+        return server_info
+
+    # Show current user's master seed
+    def actionUserShowMasterSeed(self, to):
+        if "ADMIN" not in self.site.settings["permissions"]:
+            return self.response(to, "Show master seed not allowed")
+        message = "Your unique private key:"
+        message += "%s" % self.user.master_seed
+        message += "(Save it, you can access your account using this information)"
+        self.cmd("notification", ["info", message])
+
+    # Logout user
+    def actionUserLogout(self, to):
+        if "ADMIN" not in self.site.settings["permissions"]:
+            return self.response(to, "Logout not allowed")
+        message = "You have been logged out. Login to another account"
+        message += ""
+        self.cmd("notification", ["done", message, 1000000])  # 1000000 = Show ~forever :)
+        # Delete from user_manager
+        user_manager = sys.modules["User.UserManager"].user_manager
+        if self.user.master_address in user_manager.users:
+            if not config.multiuser_local:
+                del user_manager.users[self.user.master_address]
+            self.response(to, "Successful logout")
+        else:
+            self.response(to, "User not found")
+
+    # Show login form
+    def actionUserLoginForm(self, to):
+        self.cmd("prompt", ["Login<br>Your private key:", "password", "Login"], self.responseUserLogin)
+
+    # Login form submit
+    def responseUserLogin(self, master_seed):
+        user_manager = sys.modules["User.UserManager"].user_manager
+        user = user_manager.get(CryptBitcoin.privatekeyToAddress(master_seed))
+        if not user:
+            user = user_manager.create(master_seed=master_seed)
+        if user.master_address:
+            message = "Successful login, reloading page..."
+            message += "<script>document.cookie = 'master_address=%s;path=/;max-age=2592000;'; location.reload()</script>" % user.master_address
+            self.cmd("notification", ["done", message])
+        else:
+            self.cmd("notification", ["error", "Error: Invalid master seed"])
+            self.actionUserLoginForm(0)
+
+    def hasCmdPermission(self, cmd):
+        if not config.multiuser_local and self.user.master_address not in local_master_addresses and cmd in self.multiuser_denied_cmds:
+            self.cmd("notification", ["info", "This function is disabled on this proxy!"])
+            return False
+        else:
+            return super(UiWebsocketPlugin, self).hasCmdPermission(cmd)
+
+
+@PluginManager.registerTo("ConfigPlugin")
+class ConfigPlugin(object):
+    def createArguments(self):
+        group = self.parser.add_argument_group("Multiuser plugin")
+        group.add_argument('--multiuser_local', help="Enable unsafe Ui functions and write users to disk", action='store_true')
+        group.add_argument('--multiuser_no_new_sites', help="Deny adding new sites by normal users", action='store_true')
+
+        return super(ConfigPlugin, self).createArguments()
diff --git a/plugins/disabled-Multiuser/UserPlugin.py b/plugins/disabled-Multiuser/UserPlugin.py
new file mode 100644
index 00000000..3c9ebae8
--- /dev/null
+++ b/plugins/disabled-Multiuser/UserPlugin.py
@@ -0,0 +1,35 @@
+from Config import config
+from Plugin import PluginManager
+
+allow_reload = False
+
+@PluginManager.registerTo("UserManager")
+class UserManagerPlugin(object):
+    def load(self):
+        if not config.multiuser_local:
+            # In multiuser mode do not load the users
+            if not self.users:
+                self.users = {}
+            return self.users
+        else:
+            return super(UserManagerPlugin, self).load()
+
+    # Find user by master address
+    # Return: User or None
+    def get(self, master_address=None):
+        users = self.list()
+        if master_address in users:
+            user = users[master_address]
+        else:
+            user = None
+        return user
+
+
+@PluginManager.registerTo("User")
+class UserPlugin(object):
+    # In multiuser mode user data only exists in memory, don't write to data/users.json
+    def save(self):
+        if not config.multiuser_local:
+            return False
+        else:
+            return super(UserPlugin, self).save()
diff --git a/plugins/disabled-Multiuser/__init__.py b/plugins/disabled-Multiuser/__init__.py
new file mode 100644
index 00000000..154d6008
--- /dev/null
+++ b/plugins/disabled-Multiuser/__init__.py
@@ -0,0 +1 @@
+import MultiuserPlugin
diff --git a/plugins/disabled-StemPort/StemPortPlugin.py b/plugins/disabled-StemPort/StemPortPlugin.py
new file mode 100644
index 00000000..3a3787c7
--- /dev/null
+++ b/plugins/disabled-StemPort/StemPortPlugin.py
@@ -0,0 +1,135 @@
+import logging
+import traceback
+
+import socket
+import stem
+from stem import Signal
+from stem.control import Controller
+from stem.socket import ControlPort
+
+from Plugin import PluginManager
+from Config import config
+from Debug import Debug
+
+if config.tor != "disable":
+    from gevent import monkey
+    monkey.patch_time()
+    monkey.patch_socket(dns=False)
+    monkey.patch_thread()
+    print "Stem Port Plugin: modules are patched."
+else:
+    print "Stem Port Plugin: Tor mode disabled. Module patching skipped."
+
+
+class PatchedControlPort(ControlPort):
+    def _make_socket(self):
+        try:
+            if "socket_noproxy" in dir(socket):  # Socket proxy-patched, use the non-proxy one
+                control_socket = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM)
+            else:
+                control_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
+            # TODO: repeated code - consider making a separate method
+
+            control_socket.connect((self._control_addr, self._control_port))
+            return control_socket
+        except socket.error as exc:
+            raise stem.SocketError(exc)
+
+def from_port(address='127.0.0.1', port='default'):
+    import stem.connection
+
+    if not stem.util.connection.is_valid_ipv4_address(address):
+        raise ValueError('Invalid IP address: %s' % address)
+    elif port != 'default' and not stem.util.connection.is_valid_port(port):
+        raise ValueError('Invalid port: %s' % port)
+
+    if port == 'default':
+        raise ValueError('Must specify a port')
+    else:
+        control_port = PatchedControlPort(address, port)
+
+    return Controller(control_port)
+
+
+@PluginManager.registerTo("TorManager")
+class TorManagerPlugin(object):
+
+    def connectController(self):
+        self.log.info("Authenticate using Stem... %s:%s" % (self.ip, self.port))
+
+        try:
+            with self.lock:
+                if config.tor_password:
+                    controller = from_port(port=self.port, password=config.tor_password)
+                else:
+                    controller = from_port(port=self.port)
+                controller.authenticate()
+            self.controller = controller
+            self.status = u"Connected (via Stem)"
+        except Exception, err:
+            print("\n")
+            traceback.print_exc()
+            print("\n")
+
+            self.controller = None
+            self.status = u"Error (%s)" % err
+            self.log.error("Tor stem connect error: %s" % Debug.formatException(err))
+
+        return self.controller
+
+
+    def disconnect(self):
+        self.controller.close()
+        self.controller = None
+
+
+    def resetCircuits(self):
+        try:
+            self.controller.signal(Signal.NEWNYM)
+        except Exception, err:
+            self.status = u"Stem reset circuits error (%s)" % err
+            self.log.error("Stem reset circuits error: %s" % err)
+
+
+    def makeOnionAndKey(self):
+        try:
+            service = self.controller.create_ephemeral_hidden_service(
+                {self.fileserver_port: self.fileserver_port},
+                await_publication=False
+            )
+            if service.private_key_type != "RSA1024":
+                raise Exception("ZeroNet doesn't support crypto " + service.private_key_type)
+
+            self.log.debug("Stem created %s.onion (async descriptor publication)" % service.service_id)
+
+            return (service.service_id, service.private_key)
+
+        except Exception, err:
+            self.status = u"AddOnion error (Stem: %s)" % err
+            self.log.error("Failed to create hidden service with Stem: %s" % err)
+            return False
+
+
+    def delOnion(self, address):
+        try:
+            self.controller.remove_ephemeral_hidden_service(address)
+            return True
+        except Exception, err:
+            self.status = u"DelOnion error (Stem: %s)" % err
+            self.log.error("Stem failed to delete %s.onion: %s" % (address, err))
+            self.disconnect()  # Why?
+            return False
+
+
+    def request(self, cmd):
+        with self.lock:
+            if not self.enabled:
+                return False
+            else:
+                self.log.error("[WARNING] StemPort self.request should not be called")
+                return ""
+
+    def send(self, cmd, conn=None):
+        self.log.error("[WARNING] StemPort self.send should not be called")
+        return ""
diff --git a/plugins/disabled-StemPort/__init__.py b/plugins/disabled-StemPort/__init__.py
new file mode 100644
index 00000000..1ce4d973
--- /dev/null
+++ b/plugins/disabled-StemPort/__init__.py
@@ -0,0 +1,10 @@
+try:
+    from stem.control import Controller
+    stem_found = True
+except Exception, err:
+    print "STEM NOT FOUND! %s" % err
+    stem_found = False
+
+if stem_found:
+    print "Starting Stem plugin..."
+    import StemPortPlugin
diff --git a/plugins/disabled-UiPassword/UiPasswordPlugin.py b/plugins/disabled-UiPassword/UiPasswordPlugin.py
new file mode 100644
index 00000000..a0e42e81
--- /dev/null
+++ b/plugins/disabled-UiPassword/UiPasswordPlugin.py
@@ -0,0 +1,118 @@
+import string
+import random
+import time
+import json
+import re
+
+from Config import config
+from Plugin import PluginManager
+
+if "sessions" not in locals().keys():  # To keep sessions between module reloads
+    sessions = {}
+
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+    sessions = sessions
+    last_cleanup = time.time()
+
+    def route(self, path):
+        if path.endswith("favicon.ico"):
+            return self.actionFile("src/Ui/media/img/favicon.ico")
+        else:
+            if config.ui_password:
+                if time.time() - self.last_cleanup > 60 * 60:  # Cleanup expired sessions every hour
+                    self.cleanup()
+                # Validate session
+                session_id = self.getCookies().get("session_id")
+                if session_id not in self.sessions:  # Invalid session id, display login
+                    return self.actionLogin()
+            return super(UiRequestPlugin, self).route(path)
+
+    # Action: Login
+    def actionLogin(self):
+        template = open("plugins/UiPassword/login.html").read()
+        self.sendHeader()
+        posted = self.getPosted()
+        if posted:  # Validate http posted data
+            if self.checkPassword(posted.get("password")):
+                # Valid password, create session
+                session_id = self.randomString(26)
+                self.sessions[session_id] = {
+                    "added": time.time(),
+                    "keep": posted.get("keep")
+                }
+
+                # Redirect to homepage or referer
+                url = self.env.get("HTTP_REFERER", "")
+                if not url or re.sub("\?.*", "", url).endswith("/Login"):
+                    url = "/" + config.homepage
+                cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id)  # Max age = 30 days
+                self.start_response('301 Redirect', [('Location', url), cookie_header])
+                yield "Redirecting..."
+
+            else:
+                # Invalid password, show login form again
+                template = template.replace("{result}", "bad_password")
+        yield template
+
+    def checkPassword(self, password):
+        if password == config.ui_password:
+            return True
+        else:
+            return False
+
+    def randomString(self, chars):
+        return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(chars))
+
+    @classmethod
+    def cleanup(cls):
+        cls.last_cleanup = time.time()
+        for session_id, session in cls.sessions.items():
+            if session["keep"] and time.time() - session["added"] > 60 * 60 * 24 * 60:  # Max 60 days for "keep" sessions
+                del(cls.sessions[session_id])
+            elif not session["keep"] and time.time() - session["added"] > 60 * 60 * 24:  # Max 24h for non-keep sessions
+                del(cls.sessions[session_id])
+
+    # Action: Display sessions
+    def actionSessions(self):
+        self.sendHeader()
+        yield "<pre>"
    +        yield json.dumps(self.sessions, indent=4)
    +
    +    # Action: Logout
    +    def actionLogout(self):
+        # Session id has to be passed as a get parameter, or the request must arrive without a referer, to avoid remote logout
    +        session_id = self.getCookies().get("session_id")
    +        if not self.env.get("HTTP_REFERER") or session_id == self.get.get("session_id"):
    +            if session_id in self.sessions:
    +                del self.sessions[session_id]
    +            self.start_response('301 Redirect', [
    +                ('Location', "/"),
    +                ('Set-Cookie', "session_id=deleted; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT")
    +            ])
    +            yield "Redirecting..."
    +        else:
    +            self.sendHeader()
    +            yield "Error: Invalid session id"
    +
    +
    +@PluginManager.registerTo("ConfigPlugin")
    +class ConfigPlugin(object):
    +    def createArguments(self):
    +        group = self.parser.add_argument_group("UiPassword plugin")
    +        group.add_argument('--ui_password', help='Password to access UiServer', default=None, metavar="password")
    +
    +        return super(ConfigPlugin, self).createArguments()
    +
    +
    +@PluginManager.registerTo("UiWebsocket")
    +class UiWebsocketPlugin(object):
    +    def actionUiLogout(self, to):
    +        permissions = self.getPermissions(to)
    +        if "ADMIN" not in permissions:
    +            return self.response(to, "You don't have permission to run this command")
    +
    +        session_id = self.request.getCookies().get("session_id", "")
    +        message = "" % session_id
    +        self.cmd("notification", ["done", message])
    diff --git a/plugins/disabled-UiPassword/__init__.py b/plugins/disabled-UiPassword/__init__.py
    new file mode 100644
    index 00000000..37350c3d
    --- /dev/null
    +++ b/plugins/disabled-UiPassword/__init__.py
    @@ -0,0 +1 @@
    +import UiPasswordPlugin
    \ No newline at end of file
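
Every plugin in this patch is activated the same way as the one-line __init__.py above: importing the module runs its @PluginManager.registerTo(...) decorators, which queue each class up as a mixin for a core class, and every overridden method chains onward with super(). A rough conceptual model of that mechanism, for intuition only (this is a sketch, not ZeroNet's actual PluginManager code):

    class PluginRegistry(object):
        def __init__(self):
            self.plugins = {}  # target class name -> registered mixin classes

        def registerTo(self, name):
            def decorator(plugin_class):
                self.plugins.setdefault(name, []).append(plugin_class)
                return plugin_class
            return decorator

        def apply(self, name, base_class):
            # Later-registered plugins come first in the MRO, so their methods
            # run first and can delegate down to the base class via super()
            bases = tuple(reversed(self.plugins.get(name, []))) + (base_class,)
            return type(base_class.__name__, bases, {})

    registry = PluginRegistry()

    class UiRequest(object):
        def route(self, path):
            return "base handler for " + path

    @registry.registerTo("UiRequest")
    class UiRequestPlugin(object):
        def route(self, path):
            return "password check, then " + super(UiRequestPlugin, self).route(path)

    UiRequest = registry.apply("UiRequest", UiRequest)
    print(UiRequest().route("/"))  # password check, then base handler for /

This composition model is also why enabling one of these plugins is just a rename (dropping the "disabled-" prefix from the directory), so that its __init__.py gets imported at startup.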
    diff --git a/plugins/disabled-UiPassword/login.html b/plugins/disabled-UiPassword/login.html
    new file mode 100644
    index 00000000..12d0889d
    --- /dev/null
    +++ b/plugins/disabled-UiPassword/login.html
    @@ -0,0 +1,116 @@
+<!-- login.html markup not preserved here: a standalone "Log In" page with a password input and a "keep" (stay logged in) field that POST back to the same URL, plus a {result} placeholder used for the bad_password state -->
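
The Zeroname-local plugin that follows reuses the same domain heuristics as the Dnschain plugin earlier in this patch: re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) decides whether an address is a domain at all, and the two capture groups split it into subdomain and top-level name. A quick standalone illustration of how that pattern behaves (only the regex comes from the patch; the helper is illustrative):

    import re

    DOMAIN_RE = re.compile(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$")

    def split_domain(address):
        # Returns (subdomain, top_domain), with "@" meaning "no subdomain",
        # or None when the address is a plain site address
        match = DOMAIN_RE.match(address)
        if not match:
            return None
        sub_domain = match.group(1).strip(".")
        return (sub_domain or "@", match.group(2))

    assert split_domain("blog.zeronet.bit") == ("blog", "zeronet.bit")
    assert split_domain("zeronet.bit") == ("@", "zeronet.bit")
    assert split_domain("1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D") is None  # No dot: not a domain

This is also what the "@." prepending in the Dnschain resolveDomain() was about: a bare top-level domain is cached under the "@" pseudo-subdomain.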
    diff --git a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
    new file mode 100644
    index 00000000..a25c7da8
    --- /dev/null
    +++ b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py
    @@ -0,0 +1,68 @@
    +import logging, json, os, re, sys, time
    +import gevent
    +from Plugin import PluginManager
    +from Config import config
    +from Debug import Debug
    +from domainLookup import lookupDomain
    +
    +allow_reload = False # No reload supported
    +
    +log = logging.getLogger("Zeroname-localPlugin")
    +
    +
    +@PluginManager.registerTo("SiteManager")
    +class SiteManagerPlugin(object):
    +    def load(self):
    +        super(SiteManagerPlugin, self).load()
    +
+    # Checks if it's a valid address
    +    def isAddress(self, address):
    +        if self.isDomain(address): 
    +            return True
    +        else:
    +            return super(SiteManagerPlugin, self).isAddress(address)
    +
    +
+    # Return: True if the address is a domain
    +    def isDomain(self, address):
    +        return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
    +
    +
    +    # Resolve domain
    +    # Return: The address or None
    +    def resolveDomain(self, domain):
    +        return lookupDomain(domain)
    +
    +
+    # Return or create site and start downloading site files
    +    # Return: Site or None if dns resolve failed
    +    def need(self, address, all_file=True):
+        if self.isDomain(address):  # It looks like a domain
    +            address_resolved = self.resolveDomain(address)
    +            if address_resolved:
    +                address = address_resolved
    +            else:
    +                return None
    +        
    +        return super(SiteManagerPlugin, self).need(address, all_file)
    +
    +
    +    # Return: Site object or None if not found
    +    def get(self, address):
+        if self.sites is None:  # Not loaded yet
    +            self.load()
+        if self.isDomain(address):  # It looks like a domain
    +            address_resolved = self.resolveDomain(address)
    +            if address_resolved: # Domain found
    +                site = self.sites.get(address_resolved)
    +                if site:
    +                    site_domain = site.settings.get("domain")
    +                    if site_domain != address:
    +                        site.settings["domain"] = address
    +            else: # Domain not found
    +                site = self.sites.get(address)
    +
    +        else: # Access by site address
    +            site = self.sites.get(address)
    +        return site
    +
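
The resolveDomain() above is a thin wrapper around lookupDomain() from domainLookup.py (included near the end of this patch), which asks a local namecoind over JSON-RPC for the d/<domain> name and digs the site address out of the "zeronet" map in the record's JSON value. A hedged sketch of just that value-parsing step, with an invented example record (the helper name and the addresses are illustrative only):

    import json

    def address_from_record(value_json, subdomain=""):
        # value_json: the raw "value" string stored in the Namecoin d/<domain> name
        # subdomain: "" selects the top-level entry, matching lookupDomain()'s convention
        try:
            data = json.loads(value_json)
            return data["zeronet"][subdomain]
        except (ValueError, KeyError, TypeError):
            return None  # No record, malformed JSON, or no zeronet entry

    record = '{"zeronet": {"": "1ExampleTopLevelAddr", "blog": "1ExampleBlogAddr"}}'
    print(address_from_record(record))          # 1ExampleTopLevelAddr
    print(address_from_record(record, "blog"))  # 1ExampleBlogAddr

Note that lookupDomain() uses the empty string for the bare domain, unlike the "@" convention of the Dnschain plugin's resolvers.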
    diff --git a/plugins/disabled-Zeroname-local/UiRequestPlugin.py b/plugins/disabled-Zeroname-local/UiRequestPlugin.py
    new file mode 100644
    index 00000000..462e485f
    --- /dev/null
    +++ b/plugins/disabled-Zeroname-local/UiRequestPlugin.py
    @@ -0,0 +1,40 @@
    +import re
    +from Plugin import PluginManager
    +
    +@PluginManager.registerTo("UiRequest")
    +class UiRequestPlugin(object):
    +    def __init__(self, *args, **kwargs):
    +        from Site import SiteManager
    +        self.site_manager = SiteManager.site_manager
    +        super(UiRequestPlugin, self).__init__(*args, **kwargs)
    +
    +
    +    # Media request
    +    def actionSiteMedia(self, path):
    +        match = re.match("/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) + if match: # Its a valid domain, resolve first + domain = match.group("address") + address = self.site_manager.resolveDomain(domain) + if address: + path = "/media/"+address+match.group("inner_path") + return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output + + + # Is mediarequest allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + referer_path = re.sub("\?.*", "", referer_path) # Remove http params + + if self.isProxyRequest(): # Match to site domain + referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access + referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1) + else: # Match to request path + referer_site_address = re.match("/(?P
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") + + if referer_site_address == site_address: # Referer site address as simple address + return True + elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns + return True + else: # Invalid referer + return False + diff --git a/plugins/disabled-Zeroname-local/__init__.py b/plugins/disabled-Zeroname-local/__init__.py new file mode 100644 index 00000000..889802db --- /dev/null +++ b/plugins/disabled-Zeroname-local/__init__.py @@ -0,0 +1,2 @@ +import UiRequestPlugin +import SiteManagerPlugin \ No newline at end of file diff --git a/plugins/disabled-Zeroname-local/bitcoinrpc/__init__.py b/plugins/disabled-Zeroname-local/bitcoinrpc/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py b/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py new file mode 100644 index 00000000..52cdb107 --- /dev/null +++ b/plugins/disabled-Zeroname-local/bitcoinrpc/authproxy.py @@ -0,0 +1,190 @@ + +""" + Copyright 2011 Jeff Garzik + + AuthServiceProxy has the following improvements over python-jsonrpc's + ServiceProxy class: + + - HTTP connections persist for the life of the AuthServiceProxy object + (if server supports HTTP/1.1) + - sends protocol 'version', per JSON-RPC 1.1 + - sends proper, incrementing 'id' + - sends Basic HTTP authentication headers + - parses all JSON numbers that look like floats as Decimal + - uses standard Python json lib + + Previous copyright, from python-jsonrpc/jsonrpc/proxy.py: + + Copyright (c) 2007 Jan-Klaas Kollhof + + This file is part of jsonrpc. + + jsonrpc is free software; you can redistribute it and/or modify + it under the terms of the GNU Lesser General Public License as published by + the Free Software Foundation; either version 2.1 of the License, or + (at your option) any later version. + + This software is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Lesser General Public License for more details. 
+
+  You should have received a copy of the GNU Lesser General Public License
+  along with this software; if not, write to the Free Software
+  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""
+
+try:
+    import http.client as httplib
+except ImportError:
+    import httplib
+import base64
+import decimal
+import json
+import logging
+try:
+    import urllib.parse as urlparse
+except ImportError:
+    import urlparse
+
+USER_AGENT = "AuthServiceProxy/0.1"
+
+HTTP_TIMEOUT = 30
+
+log = logging.getLogger("BitcoinRPC")
+
+class JSONRPCException(Exception):
+    def __init__(self, rpc_error):
+        parent_args = []
+        try:
+            parent_args.append(rpc_error['message'])
+        except:
+            pass
+        Exception.__init__(self, *parent_args)
+        self.error = rpc_error
+        self.code = rpc_error['code'] if 'code' in rpc_error else None
+        self.message = rpc_error['message'] if 'message' in rpc_error else None
+
+    def __str__(self):
+        return '%d: %s' % (self.code, self.message)
+
+    def __repr__(self):
+        return '<%s \'%s\'>' % (self.__class__.__name__, self)
+
+
+def EncodeDecimal(o):
+    if isinstance(o, decimal.Decimal):
+        return float(round(o, 8))
+    raise TypeError(repr(o) + " is not JSON serializable")
+
+class AuthServiceProxy(object):
+    __id_count = 0
+
+    def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
+        self.__service_url = service_url
+        self.__service_name = service_name
+        self.__url = urlparse.urlparse(service_url)
+        if self.__url.port is None:
+            port = 80
+        else:
+            port = self.__url.port
+        (user, passwd) = (self.__url.username, self.__url.password)
+        try:
+            user = user.encode('utf8')
+        except AttributeError:
+            pass
+        try:
+            passwd = passwd.encode('utf8')
+        except AttributeError:
+            pass
+        authpair = user + b':' + passwd
+        self.__auth_header = b'Basic ' + base64.b64encode(authpair)
+
+        self.__timeout = timeout
+
+        if connection:
+            # Callables re-use the connection of the original proxy
+            self.__conn = connection
+        elif self.__url.scheme == 'https':
+            self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
+                                                  timeout=timeout)
+        else:
+            self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
+                                                 timeout=timeout)
+
+    def __getattr__(self, name):
+        if name.startswith('__') and name.endswith('__'):
+            # Python internal stuff
+            raise AttributeError
+        if self.__service_name is not None:
+            name = "%s.%s" % (self.__service_name, name)
+        return AuthServiceProxy(self.__service_url, name, self.__timeout, self.__conn)
+
+    def __call__(self, *args):
+        AuthServiceProxy.__id_count += 1
+
+        log.debug("-%s-> %s %s" % (AuthServiceProxy.__id_count, self.__service_name,
+                                   json.dumps(args, default=EncodeDecimal)))
+        postdata = json.dumps({'version': '1.1',
+                               'method': self.__service_name,
+                               'params': args,
+                               'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
+        self.__conn.request('POST', self.__url.path, postdata,
+                            {'Host': self.__url.hostname,
+                             'User-Agent': USER_AGENT,
+                             'Authorization': self.__auth_header,
+                             'Content-type': 'application/json'})
+        self.__conn.sock.settimeout(self.__timeout)
+
+        response = self._get_response()
+        if response.get('error') is not None:
+            raise JSONRPCException(response['error'])
+        elif 'result' not in response:
+            raise JSONRPCException({
+                'code': -343, 'message': 'missing JSON-RPC result'})
+
+        return response['result']
+
+    def batch_(self, rpc_calls):
+        """Batch RPC call.
+           Pass array of arrays: [ [ "method", params... ], ... ]
+           Returns array of results.
+ """ + batch_data = [] + for rpc_call in rpc_calls: + AuthServiceProxy.__id_count += 1 + m = rpc_call.pop(0) + batch_data.append({"jsonrpc":"2.0", "method":m, "params":rpc_call, "id":AuthServiceProxy.__id_count}) + + postdata = json.dumps(batch_data, default=EncodeDecimal) + log.debug("--> "+postdata) + self.__conn.request('POST', self.__url.path, postdata, + {'Host': self.__url.hostname, + 'User-Agent': USER_AGENT, + 'Authorization': self.__auth_header, + 'Content-type': 'application/json'}) + results = [] + responses = self._get_response() + for response in responses: + if response['error'] is not None: + raise JSONRPCException(response['error']) + elif 'result' not in response: + raise JSONRPCException({ + 'code': -343, 'message': 'missing JSON-RPC result'}) + else: + results.append(response['result']) + return results + + def _get_response(self): + http_response = self.__conn.getresponse() + if http_response is None: + raise JSONRPCException({ + 'code': -342, 'message': 'missing HTTP response from server'}) + + responsedata = http_response.read().decode('utf8') + response = json.loads(responsedata, parse_float=decimal.Decimal) + if "error" in response and response["error"] is None: + log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal))) + else: + log.debug("<-- "+responsedata) + return response diff --git a/plugins/disabled-Zeroname-local/domainLookup.py b/plugins/disabled-Zeroname-local/domainLookup.py new file mode 100644 index 00000000..930168c0 --- /dev/null +++ b/plugins/disabled-Zeroname-local/domainLookup.py @@ -0,0 +1,78 @@ +from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException +import time, json, os, sys, re, socket + +# Connecting to RPC +def initRpc(config): + """Initialize Namecoin RPC""" + rpc_data = { + 'connect': '127.0.0.1', + 'port': '8336', + 'user': 'PLACEHOLDER', + 'password': 'PLACEHOLDER', + 'clienttimeout': '900' + } + try: + fptr = open(config, 'r') + lines = fptr.readlines() + fptr.close() + except: + return None # Or take some other appropriate action + + for line in lines: + if not line.startswith('rpc'): + continue + key_val = line.split(None, 1)[0] + (key, val) = key_val.split('=', 1) + if not key or not val: + continue + rpc_data[key[3:]] = val + + url = 'http://%(user)s:%(password)s@%(connect)s:%(port)s' % rpc_data + + return url, int(rpc_data['clienttimeout']) + +# Either returns domain's address or none if it doesn't exist +# Supports subdomains and .bit on the end +def lookupDomain(domain): + domain = domain.lower() + + #remove .bit on end + if domain[-4:] == ".bit": + domain = domain[0:-4] + + #check for subdomain + if domain.find(".") != -1: + subdomain = domain[0:domain.find(".")] + domain = domain[domain.find(".")+1:] + else: + subdomain = "" + + try: + domain_object = rpc.name_show("d/"+domain) + except: + #domain doesn't exist + return None + + domain_json = json.loads(domain_object["value"]) + + try: + domain_address = domain_json["zeronet"][subdomain] + except: + #domain exists but doesn't have any zeronet value + return None + + return domain_address + +# Loading config... 
+
+# Check whether the platform is Windows or Linux
+# On Linux namecoin is installed under ~/.namecoin, while on Windows it is in %appdata%/Namecoin
+
+if sys.platform == "win32":
+    namecoin_location = os.getenv('APPDATA') + "/Namecoin/"
+else:
+    namecoin_location = os.path.expanduser("~/.namecoin/")
+
+# Initialize rpc connection
+rpc_auth, rpc_timeout = initRpc(namecoin_location + "namecoin.conf")
+rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout)
diff --git a/requirements.txt b/requirements.txt
index 538a6dfc..eef988d0 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,13 +1,2 @@
-gevent==1.4.0; python_version <= "3.6"
-greenlet==0.4.16; python_version <= "3.6"
-gevent>=20.9.0; python_version >= "3.7"
-msgpack>=0.4.4
-base58
-merkletools @ git+https://github.com/ZeroNetX/pymerkletools.git@dev
-rsa
-PySocks>=1.6.8
-pyasn1
-websocket_client
-gevent-ws
-coincurve
-maxminddb
+gevent>=1.1.0
+msgpack-python>=0.4.4
diff --git a/src/Config.py b/src/Config.py
index a9208d55..3a196e86 100644
--- a/src/Config.py
+++ b/src/Config.py
@@ -3,40 +3,17 @@ import sys
 import os
 import locale
 import re
-import configparser
-import logging
-import logging.handlers
-import stat
-import time
+import ConfigParser
 
 
 class Config(object):
 
     def __init__(self, argv):
-        self.version = "0.9.0"
-        self.rev = 4630
+        self.version = "0.5.4"
+        self.rev = 2054
         self.argv = argv
         self.action = None
-        self.test_parser = None
-        self.pending_changes = {}
-        self.need_restart = False
-        self.keys_api_change_allowed = set([
-            "tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers",
-            "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external", "offline",
-            "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
-        ])
-        self.keys_restart_need = set([
-            "tor", "fileserver_port", "fileserver_ip_type", "threads_fs_read", "threads_fs_write", "threads_crypt", "threads_db"
-        ])
-        self.start_dir = self.getStartDir()
-
-        self.config_file = self.start_dir + "/zeronet.conf"
-        self.data_dir = self.start_dir + "/data"
-        self.log_dir = self.start_dir + "/log"
-        self.openssl_lib_file = None
-        self.openssl_bin_file = None
-
-        self.trackers_file = False
+        self.config_file = "zeronet.conf"
 
         self.createParser()
         self.createArguments()
@@ -53,41 +30,17 @@ class Config(object):
     def strToBool(self, v):
         return v.lower() in ("yes", "true", "t", "1")
 
-    def getStartDir(self):
-        this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd")
-
-        if "--start_dir" in self.argv:
-            start_dir = self.argv[self.argv.index("--start_dir") + 1]
-        elif this_file.endswith("/Contents/Resources/core/src/Config.py"):
-            # Running as ZeroNet.app
-            if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")):
-                # Running from non-writeable directory, put data to Application Support
-                start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet")
-            else:
-                # Running from writeable directory, put data next to .app
-                start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file)
-        elif this_file.endswith("/core/src/Config.py"):
-            # Running as exe or source is at Application Support directory, put var files outside of core dir
-            start_dir = this_file.replace("/core/src/Config.py", "")
-        elif this_file.endswith("usr/share/zeronet/src/Config.py"):
-            # Running from non-writeable location, e.g., AppImage
-            start_dir = os.path.expanduser("~/ZeroNet")
-        else:
-            start_dir = "."
-
-        return start_dir
-
     # Create command line arguments
     def createArguments(self):
-        from Crypt import CryptHash
-        access_key_default = CryptHash.random(24, "base64")  # Used to allow restricted plugins when multiuser plugin is enabled
         trackers = [
-            "http://open.acgnxtracker.com:80/announce",  # DE
-            "http://tracker.bt4g.com:2095/announce",  # Cloudflare
-            "http://tracker.files.fm:6969/announce",
-            "http://t.publictracker.xyz:6969/announce",
-            "https://tracker.lilithraws.cf:443/announce",
-            "https://tracker.babico.name.tr:443/announce",
+            "zero://boot3rdez4rzn36x.onion:15441",
+            "zero://boot.zeronet.io#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:15441",
+            "udp://tracker.coppersurfer.tk:6969",
+            "udp://tracker.leechers-paradise.org:6969",
+            "udp://9.rarbg.com:2710",
+            "http://tracker.opentrackr.org:1337/announce",
+            "http://explodie.org:6969/announce",
+            "http://tracker1.wasabii.com.tw:6969/announce"
         ]
         # Platform specific
         if sys.platform.startswith("win"):
@@ -97,32 +50,48 @@ class Config(object):
 
         try:
             language, enc = locale.getdefaultlocale()
-            language = language.lower().replace("_", "-")
-            if language not in ["pt-br", "zh-tw"]:
-                language = language.split("-")[0]
+            language = language.split("_")[0]
         except Exception:
             language = "en"
 
         use_openssl = True
 
-        if repr(1483108852.565) != "1483108852.565":  # Fix for weird Android issue
+        if repr(1483108852.565) != "1483108852.565":
             fix_float_decimals = True
         else:
             fix_float_decimals = False
 
-        config_file = self.start_dir + "/zeronet.conf"
-        data_dir = self.start_dir + "/data"
-        log_dir = self.start_dir + "/log"
+        this_file = os.path.abspath(__file__).replace("\\", "/")
 
-        ip_local = ["127.0.0.1", "::1"]
+        if this_file.endswith("/Contents/Resources/core/src/Config.py"):
+            # Running as ZeroNet.app
+            if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")):
+                # Running from non-writeable directory, put data to Application Support
+                start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet").decode(sys.getfilesystemencoding())
+            else:
+                # Running from writeable directory, put data next to .app
+                start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file).decode(sys.getfilesystemencoding())
+            config_file = start_dir + "/zeronet.conf"
+            data_dir = start_dir + "/data"
+            log_dir = start_dir + "/log"
+        elif this_file.endswith("/core/src/Config.py"):
+            # Running as exe or source is at Application Support directory, put var files outside of core dir
+            start_dir = this_file.replace("/core/src/Config.py", "").decode(sys.getfilesystemencoding())
+            config_file = start_dir + "/zeronet.conf"
+            data_dir = start_dir + "/data"
+            log_dir = start_dir + "/log"
+        else:
+            config_file = "zeronet.conf"
+            data_dir = "data"
+            log_dir = "log"
+
+        ip_local = ["127.0.0.1"]
 
         # Main
         action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
 
         # SiteCreate
         action = self.subparsers.add_parser("siteCreate", help='Create a new site')
-        action.register('type', 'bool', self.strToBool)
-        action.add_argument('--use_master_seed', help="Allow created site's private key to be recovered using the master seed in users.json (default: True)", type="bool", choices=[True, False], default=True)
 
         # SiteNeedFile
         action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
@@ -156,12 +125,6 @@ class Config(object):
         action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
         action.add_argument('address', help='Site to verify')
 
-        # SiteCmd
-        action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site')
-        action.add_argument('address', help='Site address')
-        action.add_argument('cmd', help='API command name')
-        action.add_argument('parameters', help='Parameters of the command', nargs='?')
-
         # dbRebuild
         action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
         action.add_argument('address', help='Site to rebuild')
@@ -196,88 +159,46 @@ class Config(object):
         action.add_argument('message', help='Message to sign')
         action.add_argument('privatekey', help='Private key')
 
-        # Crypt Verify
-        action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address')
-        action.add_argument('message', help='Message to verify')
-        action.add_argument('sign', help='Signature for message')
-        action.add_argument('address', help='Signer\'s address')
-
-        # Crypt GetPrivatekey
-        action = self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed')
-        action.add_argument('master_seed', help='Source master seed')
-        action.add_argument('site_address_index', help='Site address index', type=int)
-
-        action = self.subparsers.add_parser("getConfig", help='Return json-encoded info')
-        action = self.subparsers.add_parser("testConnection", help='Testing')
-        action = self.subparsers.add_parser("testAnnounce", help='Testing')
-
-        self.test_parser = self.subparsers.add_parser("test", help='Run a test')
-        self.test_parser.add_argument('test_name', help='Test name', nargs="?")
-        # self.test_parser.add_argument('--benchmark', help='Run the tests multiple times to measure the performance', action='store_true')
-
         # Config parameters
         self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
         self.parser.add_argument('--debug', help='Debug mode', action='store_true')
-        self.parser.add_argument('--silent', help='Only log errors to terminal output', action='store_true')
         self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
-        self.parser.add_argument('--merge_media', help='Merge all.js and all.css', action='store_true')
+        self.parser.add_argument('--debug_gevent', help='Debug gevent functions', action='store_true')
 
         self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
 
-        self.parser.add_argument('--start_dir', help='Path of working dir for variable content (data, log, .conf)', default=self.start_dir, metavar="path")
         self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
         self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
-
-        self.parser.add_argument('--console_log_level', help='Level of logging to console', default="default", choices=["default", "DEBUG", "INFO", "ERROR", "off"])
-
         self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
-        self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR", "off"])
-        self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"])
-        self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int)
 
         self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
         self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
         self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
         self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
-        self.parser.add_argument('--ui_host', help='Allow access using these hosts', metavar='host', nargs='*')
-        self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true')
-        self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name')
-        self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d',
+        self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D',
                                  metavar='address')
-        self.parser.add_argument('--updatesite', help='Source code update site', default='1Update8crprmciJHwp2WXqkx2c4iYp18',
+        self.parser.add_argument('--updatesite', help='Source code update site', default='1UPDatEDxnvHDo7TXvq6AEBARfNkyfxsp',
                                  metavar='address')
-        self.parser.add_argument('--access_key', help='Plugin access key default: Random key generated at startup', default=access_key_default, metavar='key')
-        self.parser.add_argument('--dist_type', help='Type of installed distribution', default='source')
-
-        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=25, type=int, metavar='limit')
-        self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit')
+        self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='size')
         self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
-        self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit')
         self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
 
         self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
-        self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port')
-        self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port')
-        self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"])
+        self.parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
         self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*')
-        self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*')
-        self.parser.add_argument('--offline', help='Disable network communication', action='store_true')
 
         self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
         self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
         self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
+        self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip')
         self.parser.add_argument('--trackers', help='Bootstrapping torrent trackers', default=trackers,
                                  metavar='protocol://address', nargs='*')
-        self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', metavar='path', nargs='*')
-        self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable")
-        self.parser.add_argument('--use_libsecp256k1', help='Use Libsecp256k1 library for speedup', type='bool', choices=[True, False], default=True)
-        self.parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup', type='bool', choices=[True, False], default=True)
-        self.parser.add_argument('--openssl_lib_file', help='Path for OpenSSL library file (default: detect)', default=argparse.SUPPRESS, metavar="path")
-        self.parser.add_argument('--openssl_bin_file', help='Path for OpenSSL binary file (default: detect)', default=argparse.SUPPRESS, metavar="path")
+        self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path')
+        self.parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup',
+                                 type='bool', choices=[True, False], default=use_openssl)
         self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
         self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
-        self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true')
         self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
                                  type='bool', choices=[True, False], default=True)
         self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
@@ -289,18 +210,11 @@ class Config(object):
         self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)',
                                  type='bool', choices=[True, False], default=False)
         self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power',
-                                 type='bool', choices=[True, False], default=False)
+                                 type='bool', choices=[True, False], default=True)
         self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification',
                                  type='bool', choices=[True, False], default=fix_float_decimals)
         self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
 
-        self.parser.add_argument('--threads_fs_read', help='Number of threads for file read operations', default=1, type=int)
-        self.parser.add_argument('--threads_fs_write', help='Number of threads for file write operations', default=1, type=int)
-        self.parser.add_argument('--threads_crypt', help='Number of threads for cryptographic operations', default=2, type=int)
-        self.parser.add_argument('--threads_db', help='Number of threads for database operations', default=1, type=int)
-
-        self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual")
-
         self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
                                  metavar='executable_path')
 
@@ -308,38 +222,17 @@ class Config(object):
         self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
         self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
         self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
-        self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true')
-        self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10)
-        self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='limit', type=int, default=15441)
+        self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services', metavar='limit', type=int, default=10)
 
         self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
-        self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true')
 
         return self.parser
 
     def loadTrackersFile(self):
-        if not self.trackers_file:
-            self.trackers_file = ["trackers.txt", "{data_dir}/1HELLoE3sFD9569CLCbHEAVqvqV7U2Ri9d/trackers.txt"]
-        self.trackers = self.arguments.trackers[:]
-
-        for trackers_file in self.trackers_file:
-            try:
-                if trackers_file.startswith("/"):  # Absolute
-                    trackers_file_path = trackers_file
-                elif trackers_file.startswith("{data_dir}"):  # Relative to data_dir
-                    trackers_file_path = trackers_file.replace("{data_dir}", self.data_dir)
-                else:  # Relative to zeronet.py
-                    trackers_file_path = self.start_dir + "/" + trackers_file
-
-                if not os.path.exists(trackers_file_path):
-                    continue
-
-                for line in open(trackers_file_path):
-                    tracker = line.strip()
-                    if "://" in tracker and tracker not in self.trackers:
-                        self.trackers.append(tracker)
-            except Exception as err:
-                print("Error loading trackers file: %s" % err)
+        self.trackers = []
+        for tracker in open(self.trackers_file):
+            if "://" in tracker:
+                self.trackers.append(tracker.strip())
 
     # Find arguments specified for current action
    def getActionArguments(self):
@@ -351,7 +244,7 @@ class Config(object):
 
     # Try to find action from argv
     def getAction(self, argv):
-        actions = [list(action.choices.keys()) for action in self.parser._actions if action.dest == "action"][0]  # Valid actions
+        actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0]  # Valid actions
         found_action = False
         for action in actions:  # See if any in argv
             if action in argv:
@@ -380,17 +273,8 @@ class Config(object):
                 valid_parameters.append(arg)
         return valid_parameters + plugin_parameters
 
-    def getParser(self, argv):
-        action = self.getAction(argv)
-        if not action:
-            return self.parser
-        else:
-            return self.subparsers.choices[action]
-
     # Parse arguments from config file and command line
     def parse(self, silent=False, parse_config=True):
-        argv = self.argv[:]  # Copy command line arguments
-        current_parser = self.getParser(argv)
        if silent:  # Don't display messages or quit on unknown parameter
             original_print_message = self.parser._print_message
             original_exit = self.parser.exit
@@ -398,10 +282,11 @@ class Config(object):
             def silencer(parser, function_name):
                 parser.exited = True
                 return None
-            current_parser.exited = False
-            current_parser._print_message = lambda *args, **kwargs: silencer(current_parser, "_print_message")
-            current_parser.exit = lambda *args, **kwargs: silencer(current_parser, "exit")
+            self.parser.exited = False
+            self.parser._print_message = lambda *args, **kwargs: silencer(self.parser, "_print_message")
+            self.parser.exit = lambda *args, **kwargs: silencer(self.parser, "exit")
 
+        argv = self.argv[:]  # Copy command line arguments
         self.parseCommandline(argv, silent)  # Parse argv
         self.setAttributes()
         if parse_config:
@@ -415,19 +300,16 @@ class Config(object):
             self.ip_local.append(self.fileserver_ip)
 
         if silent:  # Restore original functions
-            if current_parser.exited and self.action == "main":  # Argument parsing halted, don't start ZeroNet with main action
+            if self.parser.exited and self.action == "main":  # Argument parsing halted, don't start ZeroNet with main action
                 self.action = None
-            current_parser._print_message = original_print_message
-            current_parser.exit = original_exit
-
-        self.loadTrackersFile()
+            self.parser._print_message = original_print_message
+            self.parser.exit = original_exit
 
     # Parse command line arguments
     def parseCommandline(self, argv, silent=False):
         # Find out if action is specified on start
         action = self.getAction(argv)
         if not action:
-            argv.append("--end")
             argv.append("main")
             action = "main"
         argv = self.moveUnknownToEnd(argv, action)
@@ -447,51 +329,24 @@ class Config(object):
             self.config_file = argv[argv.index("--config_file") + 1]
         # Load config file
         if os.path.isfile(self.config_file):
-            config = configparser.RawConfigParser(allow_no_value=True, strict=False)
+            config = ConfigParser.ConfigParser(allow_no_value=True)
             config.read(self.config_file)
             for section in config.sections():
                 for key, val in config.items(section):
-                    if val == "True":
-                        val = None
                     if section != "global":  # If not global prefix key with section
                         key = section + "_" + key
-
-                    if key == "open_browser":  # Prefer config file value over cli argument
-                        while "--%s" % key in argv:
-                            pos = argv.index("--open_browser")
-                            del argv[pos:pos + 2]
-
-                    argv_extend = ["--%s" % key]
                     if val:
                         for line in val.strip().split("\n"):  # Allow multi-line values
-                            argv_extend.append(line)
-                        if "\n" in val:
-                            argv_extend.append("--end")
-
-                    argv = argv[:1] + argv_extend + argv[1:]
+                            argv.insert(1, line)
+                    argv.insert(1, "--%s" % key)
 
         return argv
 
-    # Return command line value of given argument
-    def getCmdlineValue(self, key):
-        if key not in self.argv:
-            return None
-        argv_index = self.argv.index(key)
-        if argv_index == len(self.argv) - 1:  # last arg, test not specified
-            return None
-
-        return self.argv[argv_index + 1]
-
     # Expose arguments as class attributes
     def setAttributes(self):
         # Set attributes from arguments
         if self.arguments:
             args = vars(self.arguments)
             for key, val in args.items():
-                if type(val) is list:
-                    val = val[:]
-                if key in ("data_dir", "log_dir", "start_dir", "openssl_bin_file", "openssl_lib_file"):
-                    if val:
-                        val = val.replace("\\", "/")
                 setattr(self, key, val)
 
     def loadPlugins(self):
@@ -500,11 +355,7 @@ class Config(object):
 
 @PluginManager.acceptPlugins
 class ConfigPlugin(object):
     def __init__(self, config):
-        self.argv = config.argv
         self.parser = config.parser
-        self.subparsers = config.subparsers
-        self.test_parser = config.test_parser
-        self.getCmdlineValue = config.getCmdlineValue
         self.createArguments()
 
     def createArguments(self):
@@ -525,27 +376,15 @@ class Config(object):
         for line in lines:
             if line.strip() == "[global]":
                 global_line_i = i
-            if line.startswith(key + " =") or line == key:
+            if line.startswith(key + " = "):
                 key_line_i = i
             i += 1
 
-        if key_line_i and len(lines) > key_line_i + 1:
-            while True:  # Delete previous multiline values
-                is_value_line = lines[key_line_i + 1].startswith(" ") or lines[key_line_i + 1].startswith("\t")
-                if not is_value_line:
-                    break
-                del lines[key_line_i + 1]
-
         if value is None:  # Delete line
             if key_line_i:
                 del lines[key_line_i]
-        else:  # Add / update
-            if type(value) is list:
-
value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value] - else: - value_lines = [str(value).replace("\n", "").replace("\r", "")] - new_line = "%s = %s" % (key, "\n ".join(value_lines)) + new_line = "%s = %s" % (key, str(value).replace("\n", "").replace("\r", "")) if key_line_i: # Already in the config, change the line lines[key_line_i] = new_line elif global_line_i is None: # No global section yet, append to end of file @@ -556,120 +395,4 @@ class Config(object): open(self.config_file, "w").write("\n".join(lines)) - def getServerInfo(self): - from Plugin import PluginManager - import main - - info = { - "platform": sys.platform, - "fileserver_ip": self.fileserver_ip, - "fileserver_port": self.fileserver_port, - "ui_ip": self.ui_ip, - "ui_port": self.ui_port, - "version": self.version, - "rev": self.rev, - "language": self.language, - "debug": self.debug, - "plugins": PluginManager.plugin_manager.plugin_names, - - "log_dir": os.path.abspath(self.log_dir), - "data_dir": os.path.abspath(self.data_dir), - "src_dir": os.path.dirname(os.path.abspath(__file__)) - } - - try: - info["ip_external"] = main.file_server.port_opened - info["tor_enabled"] = main.file_server.tor_manager.enabled - info["tor_status"] = main.file_server.tor_manager.status - except Exception: - pass - - return info - - def initConsoleLogger(self): - if self.action == "main": - format = '[%(asctime)s] %(name)s %(message)s' - else: - format = '%(name)s %(message)s' - - if self.console_log_level == "default": - if self.silent: - level = logging.ERROR - elif self.debug: - level = logging.DEBUG - else: - level = logging.INFO - else: - level = logging.getLevelName(self.console_log_level) - - console_logger = logging.StreamHandler() - console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S")) - console_logger.setLevel(level) - logging.getLogger('').addHandler(console_logger) - - def initFileLogger(self): - if self.action == "main": - log_file_path = "%s/debug.log" % self.log_dir - else: - log_file_path = "%s/cmd.log" % self.log_dir - - if self.log_rotate == "off": - file_logger = logging.FileHandler(log_file_path, "w", "utf-8") - else: - when_names = {"weekly": "w", "daily": "d", "hourly": "h"} - file_logger = logging.handlers.TimedRotatingFileHandler( - log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count, - encoding="utf8" - ) - - if os.path.isfile(log_file_path): - file_logger.doRollover() # Always start with empty log file - file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s')) - file_logger.setLevel(logging.getLevelName(self.log_level)) - logging.getLogger('').setLevel(logging.getLevelName(self.log_level)) - logging.getLogger('').addHandler(file_logger) - - def initLogging(self, console_logging=None, file_logging=None): - if console_logging == None: - console_logging = self.console_log_level != "off" - - if file_logging == None: - file_logging = self.log_level != "off" - - # Create necessary files and dirs - if not os.path.isdir(self.log_dir): - os.mkdir(self.log_dir) - try: - os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - except Exception as err: - print("Can't change permission of %s: %s" % (self.log_dir, err)) - - # Make warning hidden from console - logging.WARNING = 15 # Don't display warnings if not in debug mode - logging.addLevelName(15, "WARNING") - - logging.getLogger('').name = "-" # Remove root prefix - - self.error_logger = ErrorLogHandler() - 
self.error_logger.setLevel(logging.getLevelName("ERROR")) - logging.getLogger('').addHandler(self.error_logger) - - if console_logging: - self.initConsoleLogger() - if file_logging: - self.initFileLogger() - - -class ErrorLogHandler(logging.StreamHandler): - def __init__(self): - self.lines = [] - return super().__init__() - - def emit(self, record): - self.lines.append([time.time(), record.levelname, self.format(record)]) - - def onNewRecord(self, record): - pass - - config = Config(sys.argv) diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py index 22bcf29c..234053db 100644 --- a/src/Connection/Connection.py +++ b/src/Connection/Connection.py @@ -2,49 +2,37 @@ import socket import time import gevent -try: - from gevent.coros import RLock -except: - from gevent.lock import RLock +import msgpack from Config import config from Debug import Debug -from util import Msgpack +from util import StreamingMsgpack from Crypt import CryptConnection -from util import helper class Connection(object): __slots__ = ( - "sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "unpacker_bytes", "req_id", "ip_type", - "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "handshake_time", "last_recv_time", "is_private_ip", "is_tracker_connection", - "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", "send_lock", - "last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "waiting_requests", "waiting_streams" + "sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "req_id", + "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", + "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", + "last_ping_delay", "last_req_time", "last_cmd", "bad_actions", "sites", "name", "updateName", "waiting_requests", "waiting_streams" ) - def __init__(self, server, ip, port, sock=None, target_onion=None, is_tracker_connection=False): + def __init__(self, server, ip, port, sock=None, target_onion=None): self.sock = sock + self.ip = ip + self.port = port self.cert_pin = None if "#" in ip: - ip, self.cert_pin = ip.split("#") + self.ip, self.cert_pin = ip.split("#") self.target_onion = target_onion # Requested onion adress self.id = server.last_connection_id server.last_connection_id += 1 self.protocol = "?" self.type = "?" - self.ip_type = "?" 
-        self.port = int(port)
-        self.setIp(ip)
-
-        if helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
-            self.is_private_ip = True
-        else:
-            self.is_private_ip = False
-        self.is_tracker_connection = is_tracker_connection

         self.server = server
         self.unpacker = None  # Stream incoming socket messages here
-        self.unpacker_bytes = 0  # How many bytes the unpacker received
         self.req_id = 0  # Last request id
         self.handshake = {}  # Handshake info got from peer
         self.crypt = None  # Connection encryption method
@@ -56,7 +44,6 @@ class Connection(object):

         # Stats
         self.start_time = time.time()
-        self.handshake_time = 0
         self.last_recv_time = 0
         self.last_message_time = 0
         self.last_send_time = 0
@@ -66,12 +53,10 @@ class Connection(object):
         self.bytes_sent = 0
         self.last_ping_delay = None
         self.last_req_time = 0
-        self.last_cmd_sent = None
-        self.last_cmd_recv = None
+        self.last_cmd = None
         self.bad_actions = 0
         self.sites = 0
         self.cpu_time = 0.0
-        self.send_lock = RLock()

         self.name = None
         self.updateName()
@@ -79,18 +64,6 @@ class Connection(object):
         self.waiting_requests = {}  # Waiting sent requests
         self.waiting_streams = {}  # Waiting response file streams

-    def setIp(self, ip):
-        self.ip = ip
-        self.ip_type = helper.getIpType(ip)
-        self.updateName()
-
-    def createSocket(self):
-        if helper.getIpType(self.ip) == "ipv6" and not hasattr(socket, "socket_noproxy"):
-            # Create IPv6 connection as IPv4 when using proxy
-            return socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
-        else:
-            return socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-
     def updateName(self):
         self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol)
@@ -118,93 +91,43 @@ class Connection(object):

     # Open connection to peer and wait for handshake
     def connect(self):
+        self.log("Connecting...")
         self.type = "out"
-        if self.ip_type == "onion":
+        if self.ip.endswith(".onion"):
             if not self.server.tor_manager or not self.server.tor_manager.enabled:
                 raise Exception("Can't connect to onion addresses, no Tor controller present")
             self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
-        elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local:
-            raise Exception("Can't connect to local IPs in Tor: always mode")
-        elif config.trackers_proxy != "disable" and config.tor != "always" and self.is_tracker_connection:
-            if config.trackers_proxy == "tor":
-                self.sock = self.server.tor_manager.createSocket(self.ip, self.port)
-            else:
-                import socks
-                self.sock = socks.socksocket()
-                proxy_ip, proxy_port = config.trackers_proxy.split(":")
-                self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port))
         else:
-            self.sock = self.createSocket()
-
-        if "TCP_NODELAY" in dir(socket):
-            self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
-        timeout_before = self.sock.gettimeout()
-        self.sock.settimeout(30)
-        if self.ip_type == "ipv6" and not hasattr(self.sock, "proxy"):
-            sock_address = (self.ip, self.port, 1, 1)
-        else:
-            sock_address = (self.ip, self.port)
-
-        self.sock.connect(sock_address)
+            self.sock = socket.create_connection((self.ip, int(self.port)))

         # Implicit SSL
-        should_encrypt = not self.ip_type == "onion" and self.ip not in self.server.broken_ssl_ips and self.ip not in config.ip_local
         if self.cert_pin:
             self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", cert_pin=self.cert_pin)
             self.sock.do_handshake()
             self.crypt = "tls-rsa"
             self.sock_wrapped = True
-        elif should_encrypt and "tls-rsa" in CryptConnection.manager.crypt_supported:
-            try:
-                self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa")
-                self.sock.do_handshake()
-                self.crypt = "tls-rsa"
-                self.sock_wrapped = True
-            except Exception as err:
-                if not config.force_encryption:
-                    self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err)))
-                    self.server.broken_ssl_ips[self.ip] = True
-                self.sock.close()
-                self.crypt = None
-                self.sock = self.createSocket()
-                self.sock.settimeout(30)
-                self.sock.connect(sock_address)

         # Detect protocol
         self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo()})
         event_connected = self.event_connected
         gevent.spawn(self.messageLoop)
-        connect_res = event_connected.get()  # Wait for handshake
-        self.sock.settimeout(timeout_before)
-        return connect_res
+        return event_connected.get()  # Wait for handshake

     # Handle incoming connection
     def handleIncomingConnection(self, sock):
         self.log("Incoming connection...")
-
-        if "TCP_NODELAY" in dir(socket):
-            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
         self.type = "in"
         if self.ip not in config.ip_local:  # Clearnet: Check implicit SSL
             try:
-                first_byte = sock.recv(1, gevent.socket.MSG_PEEK)
-                if first_byte == b"\x16":
+                if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16":
                     self.log("Crypt in connection using implicit SSL")
                     self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", True)
                     self.sock_wrapped = True
                     self.crypt = "tls-rsa"
-            except Exception as err:
+            except Exception, err:
                 self.log("Socket peek error: %s" % Debug.formatException(err))
         self.messageLoop()

-    def getMsgpackUnpacker(self):
-        if self.handshake and self.handshake.get("use_bin_type"):
-            return Msgpack.getUnpacker(fallback=True, decode=False)
-        else:  # Backward compatibility for <0.7.0
-            return Msgpack.getUnpacker(fallback=True, decode=True)
-
     # Message loop for connection
     def messageLoop(self):
         if not self.sock:
@@ -214,12 +137,11 @@ class Connection(object):
         self.updateName()
         self.connected = True
         buff_len = 0
-        req_len = 0
-        self.unpacker_bytes = 0
+        self.unpacker = msgpack.Unpacker()

         try:
             while not self.closed:
-                buff = self.sock.recv(64 * 1024)
+                buff = self.sock.recv(16 * 1024)
                 if not buff:
                     break  # Connection closed
                 buff_len = len(buff)
@@ -229,129 +151,33 @@ class Connection(object):
                 self.incomplete_buff_recv += 1
                 self.bytes_recv += buff_len
                 self.server.bytes_recv += buff_len
-                req_len += buff_len

                 if not self.unpacker:
-                    self.unpacker = self.getMsgpackUnpacker()
-                    self.unpacker_bytes = 0
-
+                    self.unpacker = msgpack.Unpacker()
                 self.unpacker.feed(buff)
-                self.unpacker_bytes += buff_len
-
-                while True:
-                    try:
-                        message = next(self.unpacker)
-                    except StopIteration:
-                        break
-                    if not type(message) is dict:
-                        if config.debug_socket:
-                            self.log("Invalid message type: %s, content: %r, buffer: %r" % (type(message), message, buff[0:16]))
-                        raise Exception("Invalid message type: %s" % type(message))
-
-                    # Stats
+                buff = None
+                for message in self.unpacker:
                     self.incomplete_buff_recv = 0
-                    stat_key = message.get("cmd", "unknown")
-                    if stat_key == "response" and "to" in message:
-                        cmd_sent = self.waiting_requests.get(message["to"], {"cmd": "unknown"})["cmd"]
-                        stat_key = "response: %s" % cmd_sent
-                    if stat_key == "update":
-                        stat_key = "update: %s" % message["params"]["site"]
-                    self.server.stat_recv[stat_key]["bytes"] += req_len
-                    self.server.stat_recv[stat_key]["num"] += 1
                     if "stream_bytes" in message:
-                        self.server.stat_recv[stat_key]["bytes"] += message["stream_bytes"]
-                    req_len = 0
-
-                    # Handle message
-                    if "stream_bytes" in message:
-                        buff_left = self.handleStream(message, buff)
-                        self.unpacker = self.getMsgpackUnpacker()
-                        self.unpacker.feed(buff_left)
-                        self.unpacker_bytes = len(buff_left)
-                        if config.debug_socket:
-                            self.log("Start new unpacker with buff_left: %r" % buff_left)
+                        self.handleStream(message)
                     else:
                         self.handleMessage(message)

                 message = None
-            except Exception as err:
+            except Exception, err:
                 if not self.closed:
                     self.log("Socket error: %s" % Debug.formatException(err))
-                self.server.stat_recv["error: %s" % err]["bytes"] += req_len
-                self.server.stat_recv["error: %s" % err]["num"] += 1
-
-            self.close("MessageLoop ended (closed: %s)" % self.closed)  # MessageLoop ended, close connection
-
-    def getUnpackerUnprocessedBytesNum(self):
-        if "tell" in dir(self.unpacker):
-            bytes_num = self.unpacker_bytes - self.unpacker.tell()
-        else:
-            bytes_num = self.unpacker._fb_buf_n - self.unpacker._fb_buf_o
-        return bytes_num
-
-    # Stream socket directly to a file
-    def handleStream(self, message, buff):
-        stream_bytes_left = message["stream_bytes"]
-        file = self.waiting_streams[message["to"]]
-
-        unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum()
-
-        if unprocessed_bytes_num:  # Found stream bytes in unpacker
-            unpacker_stream_bytes = min(unprocessed_bytes_num, stream_bytes_left)
-            buff_stream_start = len(buff) - unprocessed_bytes_num
-            file.write(buff[buff_stream_start:buff_stream_start + unpacker_stream_bytes])
-            stream_bytes_left -= unpacker_stream_bytes
-        else:
-            unpacker_stream_bytes = 0
-
-        if config.debug_socket:
-            self.log(
-                "Starting stream %s: %s bytes (%s from unpacker, buff size: %s, unprocessed: %s)" %
-                (message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unprocessed_bytes_num)
-            )
-
-        try:
-            while 1:
-                if stream_bytes_left <= 0:
-                    break
-                stream_buff = self.sock.recv(min(64 * 1024, stream_bytes_left))
-                if not stream_buff:
-                    break
-                buff_len = len(stream_buff)
-                stream_bytes_left -= buff_len
-                file.write(stream_buff)
-
-                # Statistics
-                self.last_recv_time = time.time()
-                self.incomplete_buff_recv += 1
-                self.bytes_recv += buff_len
-                self.server.bytes_recv += buff_len
-        except Exception as err:
-            self.log("Stream read error: %s" % Debug.formatException(err))
-
-        if config.debug_socket:
-            self.log("End stream %s, file pos: %s" % (message["to"], file.tell()))
-
-        self.incomplete_buff_recv = 0
-        self.waiting_requests[message["to"]]["evt"].set(message)  # Set the response to event
-        del self.waiting_streams[message["to"]]
-        del self.waiting_requests[message["to"]]
-
-        if unpacker_stream_bytes:
-            return buff[buff_stream_start + unpacker_stream_bytes:]
-        else:
-            return b""
+            self.close("MessageLoop ended")  # MessageLoop ended, close connection

     # My handshake info
     def getHandshakeInfo(self):
         # No TLS for onion connections
-        if self.ip_type == "onion":
-            crypt_supported = []
-        elif self.ip in self.server.broken_ssl_ips:
+        if self.ip.endswith(".onion"):
             crypt_supported = []
         else:
             crypt_supported = CryptConnection.manager.crypt_supported
         # No peer id for onion connections
-        if self.ip_type == "onion" or self.ip in config.ip_local:
+        if self.ip.endswith(".onion") or self.ip in config.ip_local:
             peer_id = ""
         else:
             peer_id = self.server.peer_id
@@ -364,59 +190,35 @@ class Connection(object):
         handshake = {
             "version": config.version,
             "protocol": "v2",
-            "use_bin_type": True,
             "peer_id": peer_id,
             "fileserver_port": self.server.port,
-            "port_opened": self.server.port_opened.get(self.ip_type, None),
+            "port_opened": self.server.port_opened,
             "target_ip": self.ip,
             "rev": config.rev,
"crypt_supported": crypt_supported, - "crypt": self.crypt, - "time": int(time.time()) + "crypt": self.crypt } if self.target_onion: handshake["onion"] = self.target_onion - elif self.ip_type == "onion": + elif self.ip.endswith(".onion"): handshake["onion"] = self.server.tor_manager.getOnion("global") - if self.is_tracker_connection: - handshake["tracker_connection"] = True - - if config.debug_socket: - self.log("My Handshake: %s" % handshake) - return handshake def setHandshake(self, handshake): - if config.debug_socket: - self.log("Remote Handshake: %s" % handshake) - - if handshake.get("peer_id") == self.server.peer_id and not handshake.get("tracker_connection") and not self.is_tracker_connection: - self.close("Same peer id, can't connect to myself") - self.server.peer_blacklist.append((handshake["target_ip"], handshake["fileserver_port"])) - return False - self.handshake = handshake - if handshake.get("port_opened", None) is False and "onion" not in handshake and not self.is_private_ip: # Not connectable + if handshake.get("port_opened", None) is False and "onion" not in handshake: # Not connectable self.port = 0 else: - self.port = int(handshake["fileserver_port"]) # Set peer fileserver port + self.port = handshake["fileserver_port"] # Set peer fileserver port - if handshake.get("use_bin_type") and self.unpacker: - unprocessed_bytes_num = self.getUnpackerUnprocessedBytesNum() - self.log("Changing unpacker to bin type (unprocessed bytes: %s)" % unprocessed_bytes_num) - unprocessed_bytes = self.unpacker.read_bytes(unprocessed_bytes_num) - self.unpacker = self.getMsgpackUnpacker() # Create new unpacker for different msgpack type - self.unpacker_bytes = 0 - if unprocessed_bytes: - self.unpacker.feed(unprocessed_bytes) + if handshake.get("onion") and not self.ip.endswith(".onion"): # Set incoming connection's onion address + self.ip = handshake["onion"] + ".onion" + self.updateName() # Check if we can encrypt the connection - if handshake.get("crypt_supported") and self.ip not in self.server.broken_ssl_ips: - if type(handshake["crypt_supported"][0]) is bytes: - handshake["crypt_supported"] = [item.decode() for item in handshake["crypt_supported"]] # Backward compatibility - - if self.ip_type == "onion" or self.ip in config.ip_local: + if handshake.get("crypt_supported") and handshake["peer_id"] not in self.server.broken_ssl_peer_ids: + if self.ip.endswith(".onion"): crypt = None elif handshake.get("crypt"): # Recommended crypt by server crypt = handshake["crypt"] @@ -425,31 +227,18 @@ class Connection(object): if crypt: self.crypt = crypt - - if self.type == "in" and handshake.get("onion") and not self.ip_type == "onion": # Set incoming connection's onion address - if self.server.ips.get(self.ip) == self: - del self.server.ips[self.ip] - self.setIp(handshake["onion"] + ".onion") - self.log("Changing ip to %s" % self.ip) - self.server.ips[self.ip] = self - self.updateName() - self.event_connected.set(True) # Mark handshake as done self.event_connected = None - self.handshake_time = time.time() # Handle incoming message def handleMessage(self, message): - cmd = message["cmd"] - self.last_message_time = time.time() - self.last_cmd_recv = cmd - if cmd == "response": # New style response + if message.get("cmd") == "response": # New style response if message["to"] in self.waiting_requests: if self.last_send_time and len(self.waiting_requests) == 1: ping = time.time() - self.last_send_time self.last_ping_delay = ping - self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to 
event + self.waiting_requests[message["to"]].set(message) # Set the response to event del self.waiting_requests[message["to"]] elif message["to"] == 0: # Other peers handshake ping = time.time() - self.start_time @@ -472,15 +261,23 @@ class Connection(object): self.setHandshake(message) else: self.log("Unknown response: %s" % message) - elif cmd: - self.server.num_recv += 1 - if cmd == "handshake": + elif message.get("cmd"): # Handhsake request + if message["cmd"] == "handshake": self.handleHandshake(message) else: self.server.handleRequest(self, message) + else: # Old style response, no req_id defined + if config.debug_socket: + self.log("Unknown message: %s, waiting: %s" % (message, self.waiting_requests.keys())) + if self.waiting_requests: + last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true + self.waiting_requests[last_req_id].set(message) + del self.waiting_requests[last_req_id] # Remove from waiting request # Incoming handshake set request def handleHandshake(self, message): + if config.debug_socket: + self.log("Handshake request: %s" % message) self.setHandshake(message["params"]) data = self.getHandshakeInfo() data["cmd"] = "response" @@ -493,95 +290,113 @@ class Connection(object): try: self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin) self.sock_wrapped = True - except Exception as err: - if not config.force_encryption: - self.log("Crypt connection error, adding %s:%s as broken ssl. %s" % (self.ip, self.port, Debug.formatException(err))) - self.server.broken_ssl_ips[self.ip] = True + except Exception, err: + self.log("Crypt connection error: %s, adding peerid %s as broken ssl." % (err, message["params"]["peer_id"])) + self.server.broken_ssl_peer_ids[message["params"]["peer_id"]] = True self.close("Broken ssl") if not self.sock_wrapped and self.cert_pin: self.close("Crypt connection error: Socket not encrypted, but certificate pin present") + # Stream socket directly to a file + def handleStream(self, message): + + read_bytes = message["stream_bytes"] # Bytes left we have to read from socket + try: + buff = self.unpacker.read_bytes(min(16 * 1024, read_bytes)) # Check if the unpacker has something left in buffer + except Exception, err: + buff = "" + file = self.waiting_streams[message["to"]] + if buff: + read_bytes -= len(buff) + file.write(buff) + + if config.debug_socket: + self.log("Starting stream %s: %s bytes (%s from unpacker)" % (message["to"], message["stream_bytes"], len(buff))) + + try: + while 1: + if read_bytes <= 0: + break + buff = self.sock.recv(16 * 1024) + if not buff: + break + buff_len = len(buff) + read_bytes -= buff_len + file.write(buff) + + # Statistics + self.last_recv_time = time.time() + self.incomplete_buff_recv += 1 + self.bytes_recv += buff_len + self.server.bytes_recv += buff_len + except Exception, err: + self.log("Stream read error: %s" % Debug.formatException(err)) + + if config.debug_socket: + self.log("End stream %s" % message["to"]) + + self.incomplete_buff_recv = 0 + self.waiting_requests[message["to"]].set(message) # Set the response to event + del self.waiting_streams[message["to"]] + del self.waiting_requests[message["to"]] + # Send data to connection def send(self, message, streaming=False): - self.last_send_time = time.time() if config.debug_socket: self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: %s, req_id: %s" % ( message.get("cmd"), message.get("to"), streaming, message.get("params", {}).get("site"), message.get("params", 
{}).get("inner_path"), message.get("req_id")) ) - - if not self.sock: - self.log("Send error: missing socket") - return False - - if not self.connected and message.get("cmd") != "handshake": - self.log("Wait for handshake before send request") - self.event_connected.get() - + self.last_send_time = time.time() try: - stat_key = message.get("cmd", "unknown") - if stat_key == "response": - stat_key = "response: %s" % self.last_cmd_recv - else: - self.server.num_sent += 1 - - self.server.stat_sent[stat_key]["num"] += 1 if streaming: - with self.send_lock: - bytes_sent = Msgpack.stream(message, self.sock.sendall) + bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall) + message = None self.bytes_sent += bytes_sent self.server.bytes_sent += bytes_sent - self.server.stat_sent[stat_key]["bytes"] += bytes_sent - message = None else: - data = Msgpack.pack(message) + data = msgpack.packb(message) + message = None self.bytes_sent += len(data) self.server.bytes_sent += len(data) - self.server.stat_sent[stat_key]["bytes"] += len(data) - message = None - with self.send_lock: - self.sock.sendall(data) - except Exception as err: - self.close("Send error: %s (cmd: %s)" % (err, stat_key)) + self.sock.sendall(data) + except Exception, err: + self.close("Send errror: %s" % Debug.formatException(err)) return False self.last_sent_time = time.time() return True - # Stream file to connection without msgpacking + # Stream raw file to connection def sendRawfile(self, file, read_bytes): buff = 64 * 1024 bytes_left = read_bytes - bytes_sent = 0 while True: self.last_send_time = time.time() - data = file.read(min(bytes_left, buff)) - bytes_sent += len(data) - with self.send_lock: - self.sock.sendall(data) + self.sock.sendall( + file.read(min(bytes_left, buff)) + ) bytes_left -= buff if bytes_left <= 0: break - self.bytes_sent += bytes_sent - self.server.bytes_sent += bytes_sent - self.server.stat_sent["raw_file"]["num"] += 1 - self.server.stat_sent["raw_file"]["bytes"] += bytes_sent + self.bytes_sent += read_bytes + self.server.bytes_sent += read_bytes return True # Create and send a request to peer def request(self, cmd, params={}, stream_to=None): # Last command sent more than 10 sec ago, timeout if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10: - self.close("Request %s timeout: %.3fs" % (self.last_cmd_sent, time.time() - self.last_send_time)) + self.close("Request %s timeout: %.3fs" % (self.last_cmd, time.time() - self.last_send_time)) return False self.last_req_time = time.time() - self.last_cmd_sent = cmd + self.last_cmd = cmd self.req_id += 1 data = {"cmd": cmd, "req_id": self.req_id, "params": params} event = gevent.event.AsyncResult() # Create new event for response - self.waiting_requests[self.req_id] = {"evt": event, "cmd": cmd} + self.waiting_requests[self.req_id] = event if stream_to: self.waiting_streams[self.req_id] = stream_to self.send(data) # Send request @@ -594,9 +409,9 @@ class Connection(object): with gevent.Timeout(10.0, False): try: response = self.request("ping") - except Exception as err: + except Exception, err: self.log("Ping error: %s" % Debug.formatException(err)) - if response and "body" in response and response["body"] == b"Pong!": + if response and "body" in response and response["body"] == "Pong!": self.last_ping_delay = time.time() - s return True else: @@ -616,7 +431,7 @@ class Connection(object): (reason, len(self.waiting_requests), self.sites, self.incomplete_buff_recv) ) for request in 
self.waiting_requests.values(): # Mark pending requests failed - request["evt"].set(False) + request.set(False) self.waiting_requests = {} self.waiting_streams = {} self.sites = 0 @@ -625,7 +440,7 @@ class Connection(object): if self.sock: self.sock.shutdown(gevent.socket.SHUT_WR) self.sock.close() - except Exception as err: + except Exception, err: if config.debug_socket: self.log("Close error: %s" % err) diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py index c9048398..43439e68 100644 --- a/src/Connection/ConnectionServer.py +++ b/src/Connection/ConnectionServer.py @@ -1,148 +1,83 @@ import logging import time import sys -import socket -from collections import defaultdict import gevent import msgpack from gevent.server import StreamServer from gevent.pool import Pool -import util -from util import helper from Debug import Debug -from .Connection import Connection +from Connection import Connection from Config import config from Crypt import CryptConnection from Crypt import CryptHash from Tor import TorManager -from Site import SiteManager -class ConnectionServer(object): +class ConnectionServer: def __init__(self, ip=None, port=None, request_handler=None): - if not ip: - if config.fileserver_ip_type == "ipv6": - ip = "::1" - else: - ip = "127.0.0.1" - port = 15441 self.ip = ip self.port = port - self.last_connection_id = 0 # Connection id incrementer - self.last_connection_id_current_version = 0 # Connection id incrementer for current client version - self.last_connection_id_supported_version = 0 # Connection id incrementer for last supported version + self.last_connection_id = 1 # Connection id incrementer self.log = logging.getLogger("ConnServer") - self.port_opened = {} - self.peer_blacklist = SiteManager.peer_blacklist + self.port_opened = None + + if config.tor != "disabled": + self.tor_manager = TorManager(self.ip, self.port) + else: + self.tor_manager = None - self.tor_manager = TorManager(self.ip, self.port) self.connections = [] # Connections self.whitelist = config.ip_local # No flood protection on this ips self.ip_incoming = {} # Incoming connections from ip in the last minute to avoid connection flood - self.broken_ssl_ips = {} # Peerids of broken ssl connections + self.broken_ssl_peer_ids = {} # Peerids of broken ssl connections self.ips = {} # Connection by ip self.has_internet = True # Internet outage detection - self.stream_server = None - self.stream_server_proxy = None - self.running = False - self.stopping = False - self.thread_checker = None + self.running = True + self.thread_checker = gevent.spawn(self.checkConnections) - self.stat_recv = defaultdict(lambda: defaultdict(int)) - self.stat_sent = defaultdict(lambda: defaultdict(int)) self.bytes_recv = 0 self.bytes_sent = 0 - self.num_recv = 0 - self.num_sent = 0 - - self.num_incoming = 0 - self.num_outgoing = 0 - self.had_external_incoming = False - - self.timecorrection = 0.0 - self.pool = Pool(500) # do not accept more than 500 connections # Bittorrent style peerid - self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64") + self.peer_id = "-ZN0%s-%s" % (config.version.replace(".", ""), CryptHash.random(12, "base64")) # Check msgpack version if msgpack.version[0] == 0 and msgpack.version[1] < 4: self.log.error( - "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`" % + "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack-python 
--upgrade`" % str(msgpack.version) ) sys.exit(0) - if request_handler: - self.handleRequest = request_handler - - def start(self, check_connections=True): - if self.stopping: - return False - self.running = True - if check_connections: - self.thread_checker = gevent.spawn(self.checkConnections) - CryptConnection.manager.loadCerts() - if config.tor != "disable": - self.tor_manager.start() - if not self.port: - self.log.info("No port found, not binding") - return False - - self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % ( - self.ip, self.port, ".".join(map(str, msgpack.version)), - CryptConnection.manager.crypt_supported - )) - try: + if port: # Listen server on a port + self.pool = Pool(1000) # do not accept more than 1000 connections self.stream_server = StreamServer( - (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 + (ip.replace("*", "0.0.0.0"), port), self.handleIncomingConnection, spawn=self.pool, backlog=500 ) - except Exception as err: - self.log.info("StreamServer create error: %s" % Debug.formatException(err)) + if request_handler: + self.handleRequest = request_handler - def listen(self): - if not self.running: - return None - - if self.stream_server_proxy: - gevent.spawn(self.listenProxy) + def start(self): + self.running = True + CryptConnection.manager.loadCerts() + self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % ( + self.ip, self.port, + ".".join(map(str, msgpack.version)), CryptConnection.manager.crypt_supported) + ) try: - self.stream_server.serve_forever() - except Exception as err: - self.log.info("StreamServer listen error: %s" % err) - return False - self.log.debug("Stopped.") + self.stream_server.serve_forever() # Start normal connection server + except Exception, err: + self.log.info("StreamServer bind error, must be running already: %s" % err) def stop(self): - self.log.debug("Stopping %s" % self.stream_server) - self.stopping = True self.running = False - if self.thread_checker: - gevent.kill(self.thread_checker) - if self.stream_server: - self.stream_server.stop() - - def closeConnections(self): - self.log.debug("Closing all connection: %s" % len(self.connections)) - for connection in self.connections[:]: - connection.close("Close all connections") + self.stream_server.stop() def handleIncomingConnection(self, sock, addr): - if config.offline: - sock.close() - return False - - ip, port = addr[0:2] - ip = ip.lower() - if ip.startswith("::ffff:"): # IPv6 to IPv4 mapping - ip = ip.replace("::ffff:", "", 1) - self.num_incoming += 1 - - if not self.had_external_incoming and not helper.isPrivateIp(ip): - self.had_external_incoming = True + ip, port = addr # Connection flood protection if ip in self.ip_incoming and ip not in self.whitelist: @@ -157,26 +92,12 @@ class ConnectionServer(object): connection = Connection(self, ip, port, sock) self.connections.append(connection) - rev = connection.handshake.get("rev", 0) - if rev >= 4560: - self.last_connection_id_supported_version += 1 - if rev == config.rev: - self.last_connection_id_current_version += 1 - if ip not in config.ip_local: - self.ips[ip] = connection + self.ips[ip] = connection connection.handleIncomingConnection(sock) - def handleMessage(self, *args, **kwargs): - pass - - def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None, is_tracker_connection=False): - ip_type = helper.getIpType(ip) - has_per_site_onion = (ip.endswith(".onion") or self.port_opened.get(ip_type, None) == False) and 
self.tor_manager.start_onions and site - if has_per_site_onion: # Site-unique connection for Tor - if ip.endswith(".onion"): - site_onion = self.tor_manager.getOnion(site.address) - else: - site_onion = self.tor_manager.getOnion("global") + def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None): + if ip.endswith(".onion") and self.tor_manager.start_onions and site: # Site-unique connection for Tor + site_onion = self.tor_manager.getOnion(site.address) key = ip + site_onion else: key = ip @@ -206,40 +127,24 @@ class ConnectionServer(object): return connection # No connection found - if create and not config.offline: # Allow to create new connection if not found + if create: # Allow to create new connection if not found if port == 0: raise Exception("This peer is not connectable") - - if (ip, port) in self.peer_blacklist and not is_tracker_connection: - raise Exception("This peer is blacklisted") - try: - if has_per_site_onion: # Lock connection to site - connection = Connection(self, ip, port, target_onion=site_onion, is_tracker_connection=is_tracker_connection) + if ip.endswith(".onion") and self.tor_manager.start_onions and site: # Lock connection to site + connection = Connection(self, ip, port, target_onion=site_onion) else: - connection = Connection(self, ip, port, is_tracker_connection=is_tracker_connection) - self.num_outgoing += 1 + connection = Connection(self, ip, port) self.ips[key] = connection self.connections.append(connection) - connection.log("Connecting... (site: %s)" % site) succ = connection.connect() if not succ: connection.close("Connection event return error") raise Exception("Connection event return error") - else: - rev = connection.handshake.get("rev", 0) - if rev >= 4560: - self.last_connection_id_supported_version += 1 - if rev == config.rev: - self.last_connection_id_current_version += 1 - except Exception as err: + except Exception, err: connection.close("%s Connect error: %s" % (ip, Debug.formatException(err))) raise err - - if len(self.connections) > config.global_connected_limit: - gevent.spawn(self.checkMaxConnections) - return connection else: return None @@ -261,53 +166,46 @@ class ConnectionServer(object): def checkConnections(self): run_i = 0 - time.sleep(15) while self.running: run_i += 1 + time.sleep(60) # Check every minute self.ip_incoming = {} # Reset connected ips counter + self.broken_ssl_peer_ids = {} # Reset broken ssl peerids count last_message_time = 0 - s = time.time() for connection in self.connections[:]: # Make a copy - if connection.ip.endswith(".onion") or config.tor == "always": - timeout_multipler = 2 - else: - timeout_multipler = 1 - idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time) - if connection.last_message_time > last_message_time and not connection.is_private_ip: - # Message from local IPs does not means internet connection - last_message_time = connection.last_message_time + last_message_time = max(last_message_time, connection.last_message_time) if connection.unpacker and idle > 30: # Delete the unpacker if not needed del connection.unpacker connection.unpacker = None - elif connection.last_cmd_sent == "announce" and idle > 20: # Bootstrapper connection close after 20 sec - connection.close("[Cleanup] Tracker connection, idle: %.3fs" % idle) + elif connection.last_cmd == "announce" and idle > 20: # Bootstrapper connection close after 20 sec + connection.close("[Cleanup] Tracker connection: %s" % idle) if idle > 60 * 60: # Wake up after 1h - 
connection.close("[Cleanup] After wakeup, idle: %.3fs" % idle) + connection.close("[Cleanup] After wakeup, idle: %s" % idle) elif idle > 20 * 60 and connection.last_send_time < time.time() - 10: # Idle more than 20 min and we have not sent request in last 10 sec if not connection.ping(): connection.close("[Cleanup] Ping timeout") - elif idle > 10 * timeout_multipler and connection.incomplete_buff_recv > 0: + elif idle > 10 and connection.incomplete_buff_recv > 0: # Incomplete data with more than 10 sec idle connection.close("[Cleanup] Connection buff stalled") - elif idle > 10 * timeout_multipler and connection.protocol == "?": # No connection after 10 sec - connection.close( - "[Cleanup] Connect timeout: %.3fs" % idle - ) - - elif idle > 10 * timeout_multipler and connection.waiting_requests and time.time() - connection.last_send_time > 10 * timeout_multipler: + elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 20: # Sent command and no response in 10 sec connection.close( - "[Cleanup] Command %s timeout: %.3fs" % (connection.last_cmd_sent, time.time() - connection.last_send_time) + "[Cleanup] Command %s timeout: %.3fs" % (connection.last_cmd, time.time() - connection.last_send_time) + ) + + elif idle > 30 and connection.protocol == "?": # No connection after 30 sec + connection.close( + "[Cleanup] Connect timeout: %.3fs" % idle ) elif idle < 60 and connection.bad_actions > 40: @@ -315,19 +213,19 @@ class ConnectionServer(object): "[Cleanup] Too many bad actions: %s" % connection.bad_actions ) - elif idle > 5 * 60 and connection.sites == 0: + elif idle > 5*60 and connection.sites == 0: connection.close( "[Cleanup] No site for connection" ) - elif run_i % 90 == 0: + elif run_i % 30 == 0: # Reset bad action counter every 30 min connection.bad_actions = 0 # Internet outage detection - if time.time() - last_message_time > max(60, 60 * 10 / max(1, float(len(self.connections)) / 50)): + if time.time() - last_message_time > max(60, 60*10/max(1,float(len(self.connections))/50)): # Offline: Last message more than 60-600sec depending on connection number - if self.has_internet and last_message_time: + if self.has_internet: self.has_internet = False self.onInternetOffline() else: @@ -336,51 +234,8 @@ class ConnectionServer(object): self.has_internet = True self.onInternetOnline() - self.timecorrection = self.getTimecorrection() - - if time.time() - s > 0.01: - self.log.debug("Connection cleanup in %.3fs" % (time.time() - s)) - - time.sleep(15) - self.log.debug("Checkconnections ended") - - @util.Noparallel(blocking=False) - def checkMaxConnections(self): - if len(self.connections) < config.global_connected_limit: - return 0 - - s = time.time() - num_connected_before = len(self.connections) - self.connections.sort(key=lambda connection: connection.sites) - num_closed = 0 - for connection in self.connections: - idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time) - if idle > 60: - connection.close("Connection limit reached") - num_closed += 1 - if num_closed > config.global_connected_limit * 0.1: - break - - self.log.debug("Closed %s connections of %s after reached limit %s in %.3fs" % ( - num_closed, num_connected_before, config.global_connected_limit, time.time() - s - )) - return num_closed - def onInternetOnline(self): self.log.info("Internet online") def onInternetOffline(self): - self.had_external_incoming = False self.log.info("Internet offline") - - def getTimecorrection(self): - corrections = 
sorted([ - connection.handshake.get("time") - connection.handshake_time + connection.last_ping_delay - for connection in self.connections - if connection.handshake.get("time") and connection.last_ping_delay - ]) - if len(corrections) < 9: - return 0.0 - mid = int(len(corrections) / 2 - 1) - median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3 - return median diff --git a/src/Connection/__init__.py b/src/Connection/__init__.py index d419a3f0..5bd29c6e 100644 --- a/src/Connection/__init__.py +++ b/src/Connection/__init__.py @@ -1,2 +1,2 @@ -from .ConnectionServer import ConnectionServer -from .Connection import Connection +from ConnectionServer import ConnectionServer +from Connection import Connection diff --git a/src/Content/ContentDb.py b/src/Content/ContentDb.py index f284581e..13a5002f 100644 --- a/src/Content/ContentDb.py +++ b/src/Content/ContentDb.py @@ -1,6 +1,7 @@ +import time import os -from Db.Db import Db, DbTableError +from Db import Db from Config import config from Plugin import PluginManager from Debug import Debug @@ -11,29 +12,15 @@ class ContentDb(Db): def __init__(self, path): Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) self.foreign_keys = True - - def init(self): try: self.schema = self.getSchema() - try: - self.checkTables() - except DbTableError: - pass - self.log.debug("Checking foreign keys...") - foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone() - if foreign_key_error: - raise Exception("Database foreign key error: %s" % foreign_key_error) - except Exception as err: + self.checkTables() + except Exception, err: self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err)) self.close() - os.unlink(self.db_path) # Remove and try again - Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, self.db_path) - self.foreign_keys = True + os.unlink(path) # Remove and try again self.schema = self.getSchema() - try: - self.checkTables() - except DbTableError: - pass + self.checkTables() self.site_ids = {} self.sites = {} @@ -102,9 +89,9 @@ class ContentDb(Db): def setContent(self, site, inner_path, content, size=0): self.insertOrUpdate("content", { "size": size, - "size_files": sum([val["size"] for key, val in content.get("files", {}).items()]), - "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).items()]), - "modified": int(content.get("modified", 0)) + "size_files": sum([val["size"] for key, val in content.get("files", {}).iteritems()]), + "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).iteritems()]), + "modified": int(content["modified"]) }, { "site_id": self.site_ids.get(site.address, 0), "inner_path": inner_path @@ -129,23 +116,21 @@ class ContentDb(Db): params = {"site_id": self.site_ids.get(site.address, 0)} if ignore: params["not__inner_path"] = ignore - res = self.execute("SELECT SUM(size) + SUM(size_files) AS size, SUM(size_files_optional) AS size_optional FROM content WHERE ?", params) - row = dict(res.fetchone()) + res = self.execute("SELECT SUM(size) + SUM(size_files) AS size FROM content WHERE ?", params) + return res.fetchone()["size"] - if not row["size"]: - row["size"] = 0 - if not row["size_optional"]: - row["size_optional"] = 0 + def getOptionalSize(self, site): + res = self.execute( + "SELECT SUM(size_files_optional) AS size FROM content WHERE ?", + {"site_id": self.site_ids.get(site.address, 0)} + ) + return res.fetchone()["size"] - return row["size"], row["size_optional"] - - 
def listModified(self, site, after=None, before=None): - params = {"site_id": self.site_ids.get(site.address, 0)} - if after: - params["modified>"] = after - if before: - params["modified<"] = before - res = self.execute("SELECT inner_path, modified FROM content WHERE ?", params) + def listModified(self, site, since): + res = self.execute( + "SELECT inner_path, modified FROM content WHERE site_id = :site_id AND modified > :since", + {"site_id": self.site_ids.get(site.address, 0), "since": since} + ) return {row["inner_path"]: row["modified"] for row in res} content_dbs = {} @@ -156,7 +141,6 @@ def getContentDb(path=None): path = "%s/content.db" % config.data_dir if path not in content_dbs: content_dbs[path] = ContentDb(path) - content_dbs[path].init() return content_dbs[path] getContentDb() # Pre-connect to default one diff --git a/src/Content/ContentDbDict.py b/src/Content/ContentDbDict.py index 01df0427..a41640a8 100644 --- a/src/Content/ContentDbDict.py +++ b/src/Content/ContentDbDict.py @@ -1,10 +1,7 @@ import time import os -from . import ContentDb -from Debug import Debug -from Config import config - +import ContentDb class ContentDbDict(dict): def __init__(self, site, *args, **kwargs): @@ -22,10 +19,7 @@ class ContentDbDict(dict): try: self.num_loaded += 1 if self.num_loaded % 100 == 0: - if config.verbose: - self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack())) - else: - self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key)) + self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key)) content = self.site.storage.loadJson(key) dict.__setitem__(self, key, content) except IOError: @@ -79,7 +73,7 @@ class ContentDbDict(dict): for key in dict.keys(self): try: val = self[key] - except Exception as err: + except Exception, err: self.log.warning("Error loading %s: %s" % (key, err)) continue yield key, val @@ -89,7 +83,7 @@ class ContentDbDict(dict): for key in dict.keys(self): try: val = self[key] - except Exception as err: + except Exception, err: self.log.warning("Error loading %s: %s" % (key, err)) continue back.append((key, val)) @@ -111,11 +105,6 @@ class ContentDbDict(dict): return self.__getitem__(key) except KeyError: return default - except Exception as err: - self.site.bad_files[key] = self.site.bad_files.get(key, 1) - dict.__delitem__(self, key) - self.log.warning("Error loading %s: %s" % (key, err)) - return default def execute(self, query, params={}): params["site_id"] = self.db_id @@ -127,29 +116,29 @@ if __name__ == "__main__": s_mem = process.memory_info()[0] / float(2 ** 20) root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27" contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root) - print("Init len", len(contents)) + print "Init len", len(contents) s = time.time() for dir_name in os.listdir(root + "/data/users/")[0:8000]: contents["data/users/%s/content.json" % dir_name] - print("Load: %.3fs" % (time.time() - s)) + print "Load: %.3fs" % (time.time() - s) s = time.time() found = 0 - for key, val in contents.items(): + for key, val in contents.iteritems(): found += 1 assert key assert val - print("Found:", found) - print("Iteritem: %.3fs" % (time.time() - s)) + print "Found:", found + print "Iteritem: %.3fs" % (time.time() - s) s = time.time() found = 0 - for key in list(contents.keys()): + for key in contents.keys(): found += 1 assert key in contents - print("In: %.3fs" % (time.time() - s)) + print "In: %.3fs" % (time.time() - s) - print("Len:", len(list(contents.values())), 
len(list(contents.keys()))) + print "Len:", len(contents.values()), len(contents.keys()) - print("Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem) + print "Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py index 623cc707..f97eedd5 100644 --- a/src/Content/ContentManager.py +++ b/src/Content/ContentManager.py @@ -3,8 +3,6 @@ import time import re import os import copy -import base64 -import sys import gevent @@ -13,21 +11,10 @@ from Crypt import CryptHash from Config import config from util import helper from util import Diff -from util import SafeRe from Peer import PeerHashfield -from .ContentDbDict import ContentDbDict -from Plugin import PluginManager +from ContentDbDict import ContentDbDict -class VerifyError(Exception): - pass - - -class SignError(Exception): - pass - - -@PluginManager.acceptPlugins class ContentManager(object): def __init__(self, site): @@ -40,33 +27,20 @@ class ContentManager(object): # Load all content.json files def loadContents(self): if len(self.contents) == 0: - self.log.info("ContentDb not initialized, load files from filesystem...") + self.log.debug("ContentDb not initialized, load files from filesystem") self.loadContent(add_bad_files=False, delete_removed_files=False) - self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() + self.site.settings["size"] = self.getTotalSize() # Load hashfield cache if "hashfield" in self.site.settings.get("cache", {}): - self.hashfield.frombytes(base64.b64decode(self.site.settings["cache"]["hashfield"])) + self.hashfield.fromstring(self.site.settings["cache"]["hashfield"].decode("base64")) del self.site.settings["cache"]["hashfield"] - elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0: + elif self.contents.get("content.json") and self.getOptionalSize() > 0: self.site.storage.updateBadFiles() # No hashfield cache created yet self.has_optional_files = bool(self.hashfield) self.contents.db.initSite(self.site) - def getFileChanges(self, old_files, new_files): - deleted = {key: val for key, val in old_files.items() if key not in new_files} - deleted_hashes = {val.get("sha512"): key for key, val in old_files.items() if key not in new_files} - added = {key: val for key, val in new_files.items() if key not in old_files} - renamed = {} - for relative_path, node in added.items(): - hash = node.get("sha512") - if hash in deleted_hashes: - relative_path_old = deleted_hashes[hash] - renamed[relative_path_old] = relative_path - del(deleted[relative_path_old]) - return list(deleted), renamed - # Load content.json to self.content # Return: Changed files ["index.html", "data/messages.json"], Deleted files ["old.jpg"] def loadContent(self, content_inner_path="content.json", add_bad_files=True, delete_removed_files=True, load_includes=True, force=False): @@ -83,17 +57,17 @@ class ContentManager(object): for line in open(content_path): if '"modified"' not in line: continue - match = re.search(r"([0-9\.]+),$", line.strip(" \r\n")) + match = re.search("([0-9\.]+),$", line.strip(" \r\n")) if match and float(match.group(1)) <= old_content.get("modified", 0): self.log.debug("%s loadContent same json file, skipping" % content_inner_path) return [], [] - new_content = self.site.storage.loadJson(content_inner_path) - except Exception as err: + new_content = json.load(open(content_path)) + except Exception, err: self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err))) 
return [], [] else: - self.log.debug("Content.json not exist: %s" % content_path) + self.log.warning("Content.json not exist: %s" % content_path) return [], [] # Content.json not exist try: @@ -101,7 +75,7 @@ class ContentManager(object): changed = [] deleted = [] # Check changed - for relative_path, info in new_content.get("files", {}).items(): + for relative_path, info in new_content.get("files", {}).iteritems(): if "sha512" in info: hash_type = "sha512" else: # Backward compatibility @@ -116,7 +90,7 @@ class ContentManager(object): changed.append(content_inner_dir + relative_path) # Check changed optional files - for relative_path, info in new_content.get("files_optional", {}).items(): + for relative_path, info in new_content.get("files_optional", {}).iteritems(): file_inner_path = content_inner_dir + relative_path new_hash = info["sha512"] if old_content and old_content.get("files_optional", {}).get(relative_path): @@ -126,12 +100,11 @@ class ContentManager(object): changed.append(file_inner_path) # Download new file elif old_hash != new_hash and self.hashfield.hasHash(old_hash) and not self.site.settings.get("own"): try: - old_hash_id = self.hashfield.getHashId(old_hash) - self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"]) - self.optionalDelete(file_inner_path) + self.optionalRemove(file_inner_path, old_hash, old_content["files_optional"][relative_path]["size"]) + self.site.storage.delete(file_inner_path) self.log.debug("Deleted changed optional file: %s" % file_inner_path) - except Exception as err: - self.log.warning("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err))) + except Exception, err: + self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) else: # The file is not in the old content if self.site.isDownloadable(file_inner_path): changed.append(file_inner_path) # Download new file @@ -148,39 +121,23 @@ class ContentManager(object): **new_content.get("files_optional", {}) ) - deleted, renamed = self.getFileChanges(old_files, new_files) - - for relative_path_old, relative_path_new in renamed.items(): - self.log.debug("Renaming: %s -> %s" % (relative_path_old, relative_path_new)) - if relative_path_new in new_content.get("files_optional", {}): - self.optionalRenamed(content_inner_dir + relative_path_old, content_inner_dir + relative_path_new) - if self.site.storage.isFile(relative_path_old): - try: - self.site.storage.rename(relative_path_old, relative_path_new) - if relative_path_new in changed: - changed.remove(relative_path_new) - self.log.debug("Renamed: %s -> %s" % (relative_path_old, relative_path_new)) - except Exception as err: - self.log.warning("Error renaming file: %s -> %s %s" % (relative_path_old, relative_path_new, err)) - + deleted = [key for key in old_files if key not in new_files] if deleted and not self.site.settings.get("own"): # Deleting files that no longer in content.json for file_relative_path in deleted: file_inner_path = content_inner_dir + file_relative_path try: + self.site.storage.delete(file_inner_path) + # Check if the deleted file is optional if old_content.get("files_optional") and old_content["files_optional"].get(file_relative_path): - self.optionalDelete(file_inner_path) old_hash = old_content["files_optional"][file_relative_path].get("sha512") if self.hashfield.hasHash(old_hash): - old_hash_id = self.hashfield.getHashId(old_hash) - self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][file_relative_path]["size"]) - else: - 
self.site.storage.delete(file_inner_path) + self.optionalRemove(file_inner_path, old_hash, old_content["files_optional"][file_relative_path]["size"]) self.log.debug("Deleted file: %s" % file_inner_path) - except Exception as err: - self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err))) + except Exception, err: + self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) # Cleanup empty dirs tree = {root: [dirs, files] for root, dirs, files in os.walk(self.site.storage.getPath(content_inner_dir))} @@ -193,7 +150,7 @@ class ContentManager(object): self.site.storage.deleteDir(root_inner_path) # Remove from tree dict to reflect changed state tree[os.path.dirname(root)][0].remove(os.path.basename(root)) - except Exception as err: + except Exception, err: self.log.debug("Error deleting empty directory %s: %s" % (root_inner_path, err)) # Check archived @@ -203,49 +160,20 @@ class ContentManager(object): self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived))) archived_changed = { key: date_archived - for key, date_archived in new_archived.items() + for key, date_archived in new_archived.iteritems() if old_archived.get(key) != new_archived[key] } if archived_changed: self.log.debug("Archived changed: %s" % archived_changed) - for archived_dirname, date_archived in archived_changed.items(): + for archived_dirname, date_archived in archived_changed.iteritems(): archived_inner_path = content_inner_dir + archived_dirname + "/content.json" if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived: self.removeContent(archived_inner_path) - deleted += archived_inner_path - self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() - - # Check archived before - if old_content and "user_contents" in new_content and "archived_before" in new_content["user_contents"]: - old_archived_before = old_content.get("user_contents", {}).get("archived_before", 0) - new_archived_before = new_content.get("user_contents", {}).get("archived_before", 0) - if old_archived_before != new_archived_before: - self.log.debug("Archived before changed: %s -> %s" % (old_archived_before, new_archived_before)) - - # Remove downloaded archived files - num_removed_contents = 0 - for archived_inner_path in self.listModified(before=new_archived_before): - if archived_inner_path.startswith(content_inner_dir) and archived_inner_path != content_inner_path: - self.removeContent(archived_inner_path) - num_removed_contents += 1 - self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() - - # Remove archived files from download queue - num_removed_bad_files = 0 - for bad_file in list(self.site.bad_files.keys()): - if bad_file.endswith("content.json"): - del self.site.bad_files[bad_file] - num_removed_bad_files += 1 - - if num_removed_bad_files > 0: - self.site.worker_manager.removeSolvedFileTasks(mark_as_good=False) - gevent.spawn(self.site.update, since=0) - - self.log.debug("Archived removed contents: %s, removed bad files: %s" % (num_removed_contents, num_removed_bad_files)) + self.site.settings["size"] = self.getTotalSize() # Load includes if load_includes and "includes" in new_content: - for relative_path, info in list(new_content["includes"].items()): + for relative_path, info in new_content["includes"].items(): include_inner_path = content_inner_dir + relative_path if self.site.storage.isFile(include_inner_path): # Content.json exists, load it include_changed, include_deleted = 
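The "# Cleanup empty dirs" block in this hunk snapshots the directory tree with os.walk, then deletes directories bottom-up while updating the snapshot so parents can become empty in turn. A self-contained sketch of the same bookkeeping (prune_empty_dirs is an illustrative name; the deepest-first sort is my addition, the original relies on its own iteration order plus membership checks):

import os

def prune_empty_dirs(base_path):
    # Snapshot of the tree: {directory: [subdir names, file names]}
    tree = {root: [dirs, files] for root, dirs, files in os.walk(base_path)}
    for root in sorted(tree, key=len, reverse=True):  # Deepest paths first
        dirs, files = tree[root]
        if not dirs and not files and root != base_path:
            os.rmdir(root)
            # Reflect the deletion in the snapshot so the parent can empty out
            tree[os.path.dirname(root)][0].remove(os.path.basename(root))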
self.loadContent( @@ -283,7 +211,7 @@ class ContentManager(object): self.has_optional_files = True # Update the content self.contents[content_inner_path] = new_content - except Exception as err: + except Exception, err: self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err))) return [], [] # Content.json parse error @@ -294,7 +222,6 @@ class ContentManager(object): for inner_path in deleted: if inner_path in self.site.bad_files: del self.site.bad_files[inner_path] - self.site.worker_manager.removeSolvedFileTasks() if new_content.get("modified", 0) > self.site.settings.get("modified", 0): # Dont store modifications in the far future (more than 10 minute) @@ -310,7 +237,7 @@ class ContentManager(object): content.get("files", {}), **content.get("files_optional", {}) ) - except Exception as err: + except Exception, err: self.log.debug("Error loading %s for removeContent: %s" % (inner_path, Debug.formatException(err))) files = {} files["content.json"] = True @@ -320,92 +247,67 @@ class ContentManager(object): try: self.site.storage.delete(file_inner_path) self.log.debug("Deleted file: %s" % file_inner_path) - except Exception as err: + except Exception, err: self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) try: self.site.storage.deleteDir(inner_dir) - except Exception as err: + except Exception, err: self.log.debug("Error deleting dir %s: %s" % (inner_dir, err)) try: del self.contents[inner_path] - except Exception as err: + except Exception, err: self.log.debug("Error key from contents: %s" % inner_path) # Get total size of site # Return: 32819 (size of files in kb) def getTotalSize(self, ignore=None): - return self.contents.db.getTotalSize(self.site, ignore) + size = self.contents.db.getTotalSize(self.site, ignore) + if size: + return size + else: + return 0 - def listModified(self, after=None, before=None): - return self.contents.db.listModified(self.site, after=after, before=before) + def getOptionalSize(self): + size = self.contents.db.getOptionalSize(self.site) + if size: + return size + else: + return 0 + + def listModified(self, since): + return self.contents.db.listModified(self.site, since) def listContents(self, inner_path="content.json", user_files=False): if inner_path not in self.contents: return [] back = [inner_path] content_inner_dir = helper.getDirname(inner_path) - for relative_path in list(self.contents[inner_path].get("includes", {}).keys()): + for relative_path in self.contents[inner_path].get("includes", {}).keys(): include_inner_path = content_inner_dir + relative_path back += self.listContents(include_inner_path) return back # Returns if file with the given modification date is archived or not def isArchived(self, inner_path, modified): - match = re.match(r"(.*)/(.*?)/", inner_path) + file_info = self.getFileInfo(inner_path) + match = re.match(".*/(.*?)/", inner_path) if not match: return False - user_contents_inner_path = match.group(1) + "/content.json" - relative_directory = match.group(2) - - file_info = self.getFileInfo(user_contents_inner_path) - if file_info: - time_archived_before = file_info.get("archived_before", 0) - time_directory_archived = file_info.get("archived", {}).get(relative_directory, 0) - if modified <= time_archived_before or modified <= time_directory_archived: - return True - else: - return False + relative_directory = match.group(1) + if file_info and file_info.get("archived", {}).get(relative_directory) >= modified: + return True else: return False - def isDownloaded(self, inner_path, 
hash_id=None): - if not hash_id: - file_info = self.getFileInfo(inner_path) - if not file_info or "sha512" not in file_info: - return False - hash_id = self.hashfield.getHashId(file_info["sha512"]) - return hash_id in self.hashfield - - # Is modified since signing - def isModified(self, inner_path): - s = time.time() - if inner_path.endswith("content.json"): - try: - is_valid = self.verifyFile(inner_path, self.site.storage.open(inner_path), ignore_same=False) - if is_valid: - is_modified = False - else: - is_modified = True - except VerifyError: - is_modified = True - else: - try: - self.verifyFile(inner_path, self.site.storage.open(inner_path), ignore_same=False) - is_modified = False - except VerifyError: - is_modified = True - return is_modified - # Find the file info line from self.contents # Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"} - def getFileInfo(self, inner_path, new_file=False): + def getFileInfo(self, inner_path): dirs = inner_path.split("/") # Parent dirs of content.json inner_path_parts = [dirs.pop()] # Filename relative to content.json while True: content_inner_path = "%s/content.json" % "/".join(dirs) - content_inner_path = content_inner_path.strip("/") - content = self.contents.get(content_inner_path) + content = self.contents.get(content_inner_path.strip("/")) # Check in files if content and "files" in content: @@ -428,22 +330,8 @@ class ContentManager(object): # Return the rules if user dir if content and "user_contents" in content: back = content["user_contents"] - content_inner_path_dir = helper.getDirname(content_inner_path) - relative_content_path = inner_path[len(content_inner_path_dir):] - user_auth_address_match = re.match(r"([A-Za-z0-9]+)/.*", relative_content_path) - if user_auth_address_match: - user_auth_address = user_auth_address_match.group(1) - back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address) - else: - back["content_inner_path"] = content_inner_path_dir + "content.json" - back["optional"] = None - back["relative_path"] = "/".join(inner_path_parts) - return back - - if new_file and content: - back = {} - back["content_inner_path"] = content_inner_path - back["relative_path"] = "/".join(inner_path_parts) + # Content.json is in the users dir + back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path) back["optional"] = None return back @@ -464,12 +352,6 @@ class ContentManager(object): if not file_info: return False # File not found inner_path = file_info["content_inner_path"] - - if inner_path == "content.json": # Root content.json - rules = {} - rules["signers"] = self.getValidSigners(inner_path, content) - return rules - dirs = inner_path.split("/") # Parent dirs of content.json inner_path_parts = [dirs.pop()] # Filename relative to content.json inner_path_parts.insert(0, dirs.pop()) # Dont check in self dir @@ -492,13 +374,7 @@ class ContentManager(object): # Return: The rules of the file or False if not allowed def getUserContentRules(self, parent_content, inner_path, content): user_contents = parent_content["user_contents"] - - # Delivered for directory - if "inner_path" in parent_content: - parent_content_dir = helper.getDirname(parent_content["inner_path"]) - user_address = re.match(r"([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1) - else: - user_address = re.match(r".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) + user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) # Delivered for directory try: 
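getFileInfo() in the hunk above resolves which content.json governs a file by popping path segments until a match is found. A runnable sketch of that walk-up, covering only the "files" lookup (the real method also consults files_optional and user_contents rules; find_content_json is an illustrative name and `contents` stands in for the ContentDbDict of parsed content.json files):

def find_content_json(contents, inner_path):
    # Walk up the directory tree until a content.json that lists this file
    # is found; return its inner path, or None if no content.json claims it.
    dirs = inner_path.split("/")
    inner_path_parts = [dirs.pop()]  # Filename relative to content.json
    while True:
        content_inner_path = ("/".join(dirs) + "/content.json").lstrip("/")
        content = contents.get(content_inner_path)
        if content and "/".join(inner_path_parts) in content.get("files", {}):
            return content_inner_path
        if not dirs:  # Reached the site root without a match
            return None
        inner_path_parts.insert(0, dirs.pop())  # Step one directory up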
if not content: @@ -509,11 +385,7 @@ class ContentManager(object): user_urn = "n-a/n-a" cert_user_id = "n-a" - if user_address in user_contents["permissions"]: - rules = copy.copy(user_contents["permissions"].get(user_address, {})) # Default rules based on address - else: - rules = copy.copy(user_contents["permissions"].get(cert_user_id, {})) # Default rules based on username - + rules = copy.copy(user_contents["permissions"].get(cert_user_id, {})) # Default rules by username if rules is False: banned = True rules = {} @@ -521,11 +393,11 @@ class ContentManager(object): banned = False if "signers" in rules: rules["signers"] = rules["signers"][:] # Make copy of the signers - for permission_pattern, permission_rules in list(user_contents["permission_rules"].items()): # Regexp rules - if not SafeRe.match(permission_pattern, user_urn): + for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules + if not re.match(permission_pattern, user_urn): continue # Rule is not valid for user # Update rules if its better than current recorded ones - for key, val in permission_rules.items(): + for key, val in permission_rules.iteritems(): if key not in rules: if type(val) is list: rules[key] = val[:] # Make copy @@ -540,10 +412,7 @@ class ContentManager(object): elif type(val) is list: # List, append rules[key] += val - # Accepted cert signers - rules["cert_signers"] = user_contents.get("cert_signers", {}) - rules["cert_signers_pattern"] = user_contents.get("cert_signers_pattern") - + rules["cert_signers"] = user_contents["cert_signers"] # Add valid cert signers if "signers" not in rules: rules["signers"] = [] @@ -581,82 +450,50 @@ class ContentManager(object): self.site.storage.delete(file_inner_path + "-old") return diffs - def hashFile(self, dir_inner_path, file_relative_path, optional=False): - back = {} - file_inner_path = dir_inner_path + "/" + file_relative_path - - file_path = self.site.storage.getPath(file_inner_path) - file_size = os.path.getsize(file_path) - sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file - if optional and not self.hashfield.hasHash(sha512sum): - self.optionalDownloaded(file_inner_path, self.hashfield.getHashId(sha512sum), file_size, own=True) - - back[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} - return back - - def isValidRelativePath(self, relative_path): - if ".." 
in relative_path.replace("\\", "/").split("/"): - return False - elif len(relative_path) > 255: - return False - elif relative_path[0] in ("/", "\\"): # Starts with - return False - elif relative_path[-1] in (".", " "): # Ends with - return False - elif re.match(r".*(^|/)(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]|CONOUT\$|CONIN\$)(\.|/|$)", relative_path, re.IGNORECASE): # Protected on Windows - return False - else: - return re.match(r"^[^\x00-\x1F\"*:<>?\\|]+$", relative_path) - - def sanitizePath(self, inner_path): - return re.sub("[\x00-\x1F\"*:<>?\\|]", "", inner_path) - # Hash files in directory def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None): files_node = {} files_optional_node = {} - db_inner_path = self.site.storage.getDbFile() - if dir_inner_path and not self.isValidRelativePath(dir_inner_path): + if not re.match("^[a-zA-Z0-9_@=\.\+-/]*$", dir_inner_path): ignored = True self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path) - for file_relative_path in self.site.storage.walk(dir_inner_path, ignore_pattern): + for file_relative_path in self.site.storage.walk(dir_inner_path): file_name = helper.getFilename(file_relative_path) ignored = optional = False if file_name == "content.json": ignored = True + elif ignore_pattern and re.match(ignore_pattern, file_relative_path): + ignored = True elif file_name.startswith(".") or file_name.endswith("-old") or file_name.endswith("-new"): ignored = True - elif not self.isValidRelativePath(file_relative_path): + elif not re.match("^[a-zA-Z0-9_@=\.\+\-/]+$", file_relative_path): ignored = True - self.log.error("- [ERROR] Invalid filename: %s" % file_relative_path) - elif dir_inner_path == "" and db_inner_path and file_relative_path.startswith(db_inner_path): - ignored = True - elif optional_pattern and SafeRe.match(optional_pattern, file_relative_path): + self.log.error("- [ERROR] Only ascii encoded filenames allowed: %s" % file_relative_path) + elif optional_pattern and re.match(optional_pattern, file_relative_path): optional = True if ignored: # Ignore content.json, defined regexp and files starting with . 
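The removed isValidRelativePath() above is the newer branch's defence against hostile file names. The same checks as a standalone function, with an added guard for the empty string (without it the index lookups would raise):

import re

def is_valid_relative_path(relative_path):
    # Rejects parent references, over-long paths, leading separators,
    # trailing dot/space, Windows-reserved device names, and control or
    # shell-special characters -- the same rules as the removed helper.
    if not relative_path:
        return False
    if ".." in relative_path.replace("\\", "/").split("/"):
        return False
    if len(relative_path) > 255:
        return False
    if relative_path[0] in ("/", "\\"):  # Starts with a separator
        return False
    if relative_path[-1] in (".", " "):  # Ends with dot or space
        return False
    if re.match(r".*(^|/)(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]|CONOUT\$|CONIN\$)(\.|/|$)", relative_path, re.IGNORECASE):
        return False  # Protected names on Windows
    return bool(re.match(r"^[^\x00-\x1F\"*:<>?\\|]+$", relative_path))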
self.log.info("- [SKIPPED] %s" % file_relative_path) else: + file_inner_path = dir_inner_path + "/" + file_relative_path + file_path = self.site.storage.getPath(file_inner_path) + sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file if optional: - self.log.info("- [OPTIONAL] %s" % file_relative_path) - files_optional_node.update( - self.hashFile(dir_inner_path, file_relative_path, optional=True) - ) + self.log.info("- [OPTIONAL] %s (SHA512: %s)" % (file_relative_path, sha512sum)) + file_size = os.path.getsize(file_path) + files_optional_node[file_relative_path] = {"sha512": sha512sum, "size": file_size} + if not self.hashfield.hasHash(sha512sum): + self.optionalDownloaded(file_inner_path, sha512sum, file_size, own=True) else: - self.log.info("- %s" % file_relative_path) - files_node.update( - self.hashFile(dir_inner_path, file_relative_path) - ) + self.log.info("- %s (SHA512: %s)" % (file_relative_path, sha512sum)) + files_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} return files_node, files_optional_node # Create and sign a content.json # Return: The new content if filewrite = False def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None, remove_missing_optional=False): - if not inner_path.endswith("content.json"): - raise SignError("Invalid file name, you can only sign content.json files") - if inner_path in self.contents: content = self.contents.get(inner_path) if content and content.get("cert_sign", False) is None and self.site.storage.isFile(inner_path): @@ -684,10 +521,9 @@ class ContentManager(object): if extend: # Add extend keys if not exists - for key, val in list(extend.items()): - if not content.get(key): + for key, val in extend.items(): + if key not in content: content[key] = val - self.log.info("Extending content.json with: %s" % key) directory = helper.getDirname(self.site.storage.getPath(inner_path)) inner_directory = helper.getDirname(inner_path) @@ -699,14 +535,14 @@ class ContentManager(object): ) if not remove_missing_optional: - for file_inner_path, file_details in content.get("files_optional", {}).items(): + for file_inner_path, file_details in content.get("files_optional", {}).iteritems(): if file_inner_path not in files_optional_node: files_optional_node[file_inner_path] = file_details # Find changed files files_merged = files_node.copy() files_merged.update(files_optional_node) - for file_relative_path, file_details in files_merged.items(): + for file_relative_path, file_details in files_merged.iteritems(): old_hash = content.get("files", {}).get(file_relative_path, {}).get("sha512") new_hash = files_merged[file_relative_path]["sha512"] if old_hash != new_hash: @@ -727,6 +563,7 @@ class ContentManager(object): elif "files_optional" in new_content: del new_content["files_optional"] + new_content["modified"] = int(time.time()) # Add timestamp if inner_path == "content.json": new_content["zeronet_version"] = config.version new_content["signs_required"] = content.get("signs_required", 1) @@ -740,53 +577,41 @@ class ContentManager(object): privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey) valid_signers = self.getValidSigners(inner_path, new_content) if privatekey_address not in valid_signers: - raise SignError( + return self.log.error( "Private key invalid! 
Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address) ) self.log.info("Correct %s in valid signers: %s" % (privatekey_address, valid_signers)) - signs_required = 1 if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key, then sign the valid signers - signs_required = new_content["signs_required"] - signers_data = "%s:%s" % (signs_required, ",".join(valid_signers)) - new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey) + new_content["signers_sign"] = CryptBitcoin.sign( + "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey + ) if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none") self.log.info("Signing %s..." % inner_path) if "signs" in new_content: - # del(new_content["signs"]) # Delete old signs - old_signs_content = new_content["signs"] - del(new_content["signs"]) - else: - old_signs_content = None + del(new_content["signs"]) # Delete old signs if "sign" in new_content: del(new_content["sign"]) # Delete old sign (backward compatibility) - if signs_required > 1: - has_valid_sign = False - sign_content = json.dumps(new_content, sort_keys=True) - for signer in valid_signers: - res = CryptBitcoin.verify(sign_content,signer,old_signs_content[signer]); - print(res) - if res: - has_valid_sign = has_valid_sign or res - if has_valid_sign: - new_content["modified"] = content["modified"] - sign_content = json.dumps(new_content, sort_keys=True) - else: - new_content["modified"] = int(time.time()) # Add timestamp - sign_content = json.dumps(new_content, sort_keys=True) + sign_content = json.dumps(new_content, sort_keys=True) sign = CryptBitcoin.sign(sign_content, privatekey) # new_content["signs"] = content.get("signs", {}) # TODO: Multisig if sign: # If signing is successful (not an old address) - new_content["signs"] = old_signs_content or {} + new_content["signs"] = {} new_content["signs"][privatekey_address] = sign - self.verifyContent(inner_path, new_content) + if inner_path == "content.json": # To root content.json add old format sign for backward compatibility + oldsign_content = json.dumps(new_content, sort_keys=True) + new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey) + + if not self.verifyContent(inner_path, new_content): + self.log.error("Sign failed: Invalid content") + return False if filewrite: self.log.info("Saving to %s..." 
% inner_path) @@ -806,7 +631,7 @@ class ContentManager(object): valid_signers = [] if inner_path == "content.json": # Root content.json if "content.json" in self.contents and "signers" in self.contents["content.json"]: - valid_signers += self.contents["content.json"]["signers"][:] + valid_signers += self.contents["content.json"]["signers"].keys() else: rules = self.getRules(inner_path, content) if rules and "signers" in rules: @@ -818,58 +643,44 @@ # Return: The required number of valid signs for the content.json def getSignsRequired(self, inner_path, content=None): - if not content: - return 1 - return content.get("signs_required", 1) - - def verifyCertSign(self, user_address, user_auth_type, user_name, issuer_address, sign): - from Crypt import CryptBitcoin - cert_subject = "%s#%s/%s" % (user_address, user_auth_type, user_name) - return CryptBitcoin.verify(cert_subject, issuer_address, sign) + return 1 # Todo: Multisig def verifyCert(self, inner_path, content): + from Crypt import CryptBitcoin + rules = self.getRules(inner_path, content) - if not rules: - raise VerifyError("No rules for this file") - - if not rules.get("cert_signers") and not rules.get("cert_signers_pattern"): + if not rules.get("cert_signers"): return True # Does not need cert - if "cert_user_id" not in content: - raise VerifyError("Missing cert_user_id") - - if content["cert_user_id"].count("@") != 1: - raise VerifyError("Invalid domain in cert_user_id") - - name, domain = content["cert_user_id"].rsplit("@", 1) + name, domain = content["cert_user_id"].split("@") cert_address = rules["cert_signers"].get(domain) - if not cert_address: # Unknown Cert signer - if rules.get("cert_signers_pattern") and SafeRe.match(rules["cert_signers_pattern"], domain): - cert_address = domain - else: - raise VerifyError("Invalid cert signer: %s" % domain) + if not cert_address: # Cert signer not allowed + self.log.warning("Invalid cert signer: %s" % domain) + return False - return self.verifyCertSign(rules["user_address"], content["cert_auth_type"], name, cert_address, content["cert_sign"]) + try: + cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name) + result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"]) + except Exception, err: + self.log.warning("Certificate verify error: %s" % err) + result = False + return result # Checks if the content.json content is valid # Return: True or False def verifyContent(self, inner_path, content): - content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in list(content["files"].values()) if file["size"] >= 0]) # Size of new content + content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values()]) # Size of new content # Calculate old content size old_content = self.contents.get(inner_path) if old_content: - old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in list(old_content.get("files", {}).values())]) - old_content_size_optional = sum([file["size"] for file in list(old_content.get("files_optional", {}).values())]) + old_content_size = len(json.dumps(old_content, indent=1)) + sum([file["size"] for file in old_content.get("files", {}).values()]) + old_content_size_optional = sum([file["size"] for file in old_content.get("files_optional", {}).values()]) else: old_content_size = 0 old_content_size_optional = 0 - # Reset site size on first content.json - if not old_content and inner_path == "content.json": - 
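For multi-signer sites, sign() in the preceding hunk authorises the signer set with a separate signature: the root key signs the string "<signs_required>:<addr1>,<addr2>,...", and verifyFile() later checks that exact string against signers_sign. A small sketch of building and checking it, usable inside the ZeroNet source tree (the helper names are illustrative; CryptBitcoin.sign/verify are the real calls):

from Crypt import CryptBitcoin

def make_signers_sign(signs_required, valid_signers, root_privatekey):
    # e.g. "2:1AddrA...,1AddrB..." -- the format sign() feeds to CryptBitcoin.sign()
    signers_data = "%s:%s" % (signs_required, ",".join(valid_signers))
    return CryptBitcoin.sign(signers_data, root_privatekey)

def check_signers_sign(signs_required, valid_signers, site_address, signers_sign):
    # verifyFile() rebuilds the same string and verifies it against the site address
    signers_data = "%s:%s" % (signs_required, ",".join(valid_signers))
    return CryptBitcoin.verify(signers_data, site_address, signers_sign)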
self.site.settings["size"] = 0 - - content_size_optional = sum([file["size"] for file in list(content.get("files_optional", {}).values()) if file["size"] >= 0]) + content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values()]) site_size = self.site.settings["size"] - old_content_size + content_size # Site size without old content plus the new site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional # Site size without old content plus the new @@ -877,71 +688,69 @@ class ContentManager(object): # Check site address if content.get("address") and content["address"] != self.site.address: - raise VerifyError("Wrong site address: %s != %s" % (content["address"], self.site.address)) + self.log.warning("%s: Wrong site address: %s != %s" % (inner_path, content["address"], self.site.address)) + return False # Check file inner path if content.get("inner_path") and content["inner_path"] != inner_path: - raise VerifyError("Wrong inner_path: %s" % content["inner_path"]) + self.log.warning("%s: Wrong inner_path: %s" % (inner_path, content["inner_path"])) + return False - # If our content.json file bigger than the size limit throw error - if inner_path == "content.json": - content_size_file = len(json.dumps(content, indent=1)) - if content_size_file > site_size_limit: - # Save site size to display warning + # Check total site size limit + if site_size > site_size_limit: + self.log.warning("%s: Site too large %s > %s, aborting task..." % (inner_path, site_size, site_size_limit)) + if inner_path == "content.json" and self.site.settings["size"] == 0: + # First content.json download, save site size to display warning self.site.settings["size"] = site_size - task = self.site.worker_manager.tasks.findTask(inner_path) - if task: # Dont try to download from other peers - self.site.worker_manager.failTask(task) - raise VerifyError("Content too large %s B > %s B, aborting task..." 
% (site_size, site_size_limit)) - - # Verify valid filenames - for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): - if not self.isValidRelativePath(file_relative_path): - raise VerifyError("Invalid relative path: %s" % file_relative_path) + task = self.site.worker_manager.findTask(inner_path) + if task: # Dont try to download from other peers + self.site.worker_manager.failTask(task) + return False if inner_path == "content.json": self.site.settings["size"] = site_size self.site.settings["size_optional"] = site_size_optional return True # Root content.json is passed - else: - if self.verifyContentInclude(inner_path, content, content_size, content_size_optional): - self.site.settings["size"] = site_size - self.site.settings["size_optional"] = site_size_optional - return True - else: - raise VerifyError("Content verify error") - def verifyContentInclude(self, inner_path, content, content_size, content_size_optional): # Load include details rules = self.getRules(inner_path, content) if not rules: - raise VerifyError("No rules") + self.log.warning("%s: No rules" % inner_path) + return False # Check include size limit if rules.get("max_size") is not None: # Include size limit if content_size > rules["max_size"]: - raise VerifyError("Include too large %sB > %sB" % (content_size, rules["max_size"])) + self.log.warning("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"])) + return False if rules.get("max_size_optional") is not None: # Include optional files limit if content_size_optional > rules["max_size_optional"]: - raise VerifyError("Include optional files too large %sB > %sB" % ( - content_size_optional, rules["max_size_optional"]) + self.log.warning("%s: Include optional files too large %s > %s" % ( + inner_path, content_size_optional, rules["max_size_optional"]) ) + return False # Filename limit if rules.get("files_allowed"): - for file_inner_path in list(content["files"].keys()): - if not SafeRe.match(r"^%s$" % rules["files_allowed"], file_inner_path): - raise VerifyError("File not allowed: %s" % file_inner_path) + for file_inner_path in content["files"].keys(): + if not re.match("^%s$" % rules["files_allowed"], file_inner_path): + self.log.warning("%s %s: File not allowed" % (inner_path, file_inner_path)) + return False if rules.get("files_allowed_optional"): - for file_inner_path in list(content.get("files_optional", {}).keys()): - if not SafeRe.match(r"^%s$" % rules["files_allowed_optional"], file_inner_path): - raise VerifyError("Optional file not allowed: %s" % file_inner_path) + for file_inner_path in content.get("files_optional", {}).keys(): + if not re.match("^%s$" % rules["files_allowed_optional"], file_inner_path): + self.log.warning("%s %s: Optional file not allowed" % (inner_path, file_inner_path)) + return False # Check if content includes allowed if rules.get("includes_allowed") is False and content.get("includes"): - raise VerifyError("Includes not allowed") + self.log.warning("%s: Includes not allowed" % inner_path) + return False # Includes not allowed + + self.site.settings["size"] = site_size + self.site.settings["size_optional"] = site_size_optional return True # All good @@ -954,29 +763,27 @@ class ContentManager(object): if type(file) is dict: new_content = file else: - try: - if sys.version_info.major == 3 and sys.version_info.minor < 6: - new_content = json.loads(file.read().decode("utf8")) - else: - new_content = json.load(file) - except Exception as err: - raise 
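verifyContentInclude() above (folded back into verifyContent() on the right-hand side of the hunk) enforces per-include quotas: a byte budget for the include and regex whitelists for its file names. A compact sketch of those checks (check_include_rules is an illustrative name; the newer branch matches patterns through SafeRe to reject pathological regexes):

import re

def check_include_rules(rules, content_size, file_names):
    # Returns an error string, or None when the include passes.
    if rules.get("max_size") is not None and content_size > rules["max_size"]:
        return "Include too large %sB > %sB" % (content_size, rules["max_size"])
    if rules.get("files_allowed"):
        for file_name in file_names:
            # The real code uses SafeRe.match here instead of re.match
            if not re.match(r"^%s$" % rules["files_allowed"], file_name):
                return "File not allowed: %s" % file_name
    return None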
VerifyError("Invalid json file: %s" % err) + new_content = json.load(file) if inner_path in self.contents: old_content = self.contents.get(inner_path, {"modified": 0}) # Checks if its newer the ours if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json return None elif old_content["modified"] > new_content["modified"]: # We have newer - raise VerifyError( - "We have newer (Our: %s, Sent: %s)" % - (old_content["modified"], new_content["modified"]) + self.log.debug( + "We have newer %s (Our: %s, Sent: %s)" % + (inner_path, old_content["modified"], new_content["modified"]) ) + # gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers + return False if new_content["modified"] > time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day+) - raise VerifyError("Modify timestamp is in the far future!") + self.log.warning("%s modify is in the future!" % inner_path) + return False if self.isArchived(inner_path, new_content["modified"]): + self.log.warning("%s this file is archived!" % inner_path) if inner_path in self.site.bad_files: del self.site.bad_files[inner_path] - raise VerifyError("This file is archived!") + return False # Check sign sign = new_content.get("sign") signs = new_content.get("signs", {}) @@ -996,72 +803,129 @@ class ContentManager(object): '"modified": %s' % modified_fixed ) + if not self.verifyContent(inner_path, new_content): + return False # Content not valid (files too large, invalid files) + if signs: # New style signing valid_signers = self.getValidSigners(inner_path, new_content) signs_required = self.getSignsRequired(inner_path, new_content) if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json - signers_data = "%s:%s" % (signs_required, ",".join(valid_signers)) - if not CryptBitcoin.verify(signers_data, self.site.address, new_content["signers_sign"]): - raise VerifyError("Invalid signers_sign!") + if not CryptBitcoin.verify( + "%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"] + ): + self.log.warning("%s invalid signers_sign!" % inner_path) + return False if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid - raise VerifyError("Invalid cert!") + self.log.warning("%s invalid cert!" 
% inner_path) + return False - valid_signs = [] + valid_signs = 0 for address in valid_signers: if address in signs: - result = CryptBitcoin.verify(sign_content, address, signs[address]) - if result: - valid_signs.append(address) - if len(valid_signs) >= signs_required: + valid_signs += CryptBitcoin.verify(sign_content, address, signs[address]) + if valid_signs >= signs_required: break # Break if we has enough signs - if len(valid_signs) < signs_required: - raise VerifyError("Valid signs: %s/%s, Valid Signers : %s" % (len(valid_signs), signs_required, valid_signs)) - else: - return self.verifyContent(inner_path, new_content) + if config.verbose: + self.log.debug("%s: Valid signs: %s/%s" % (inner_path, valid_signs, signs_required)) + return valid_signs >= signs_required else: # Old style signing - raise VerifyError("Invalid old-style sign") + return CryptBitcoin.verify(sign_content, self.site.address, sign) - except Exception as err: - self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err))) - raise err + except Exception, err: + self.log.warning("Verify sign error: %s" % Debug.formatException(err)) + return False else: # Check using sha512 hash file_info = self.getFileInfo(inner_path) if file_info: - if CryptHash.sha512sum(file) != file_info.get("sha512", ""): - raise VerifyError("Invalid hash") - + if "sha512" in file_info: + hash_valid = CryptHash.sha512sum(file) == file_info["sha512"] + elif "sha1" in file_info: # Backward compatibility + hash_valid = CryptHash.sha1sum(file) == file_info["sha1"] + else: + hash_valid = False if file_info.get("size", 0) != file.tell(): - raise VerifyError( - "File size does not match %s <> %s" % - (inner_path, file.tell(), file_info.get("size", 0)) + self.log.warning( + "%s file size does not match %s <> %s, Hash: %s" % + (inner_path, file.tell(), file_info.get("size", 0), hash_valid) ) - - return True + return False + return hash_valid else: # File not in content.json - raise VerifyError("File not in content.json") + self.log.warning("File not in content.json: %s" % inner_path) + return False - def optionalDelete(self, inner_path): - self.site.storage.delete(inner_path) - - def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): + def optionalDownloaded(self, inner_path, hash, size=None, own=False): if size is None: size = self.site.storage.getSize(inner_path) - - done = self.hashfield.appendHashId(hash_id) + if type(hash) is int: + done = self.hashfield.appendHashId(hash) + else: + done = self.hashfield.appendHash(hash) self.site.settings["optional_downloaded"] += size return done - def optionalRemoved(self, inner_path, hash_id, size=None): + def optionalRemove(self, inner_path, hash, size=None): if size is None: size = self.site.storage.getSize(inner_path) - done = self.hashfield.removeHashId(hash_id) - + if type(hash) is int: + done = self.hashfield.removeHashId(hash) + else: + done = self.hashfield.removeHash(hash) self.site.settings["optional_downloaded"] -= size return done - def optionalRenamed(self, inner_path_old, inner_path_new): - return True + +if __name__ == "__main__": + def testSign(): + global config + from Site import Site + site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") + content_manager = ContentManager(site) + content_manager.sign( + "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR" + ) + + def testVerify(): + from Site import Site + site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") + + content_manager = 
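Both sides of the hunk above implement the same acceptance rule for new-style signatures: walk the allowed signers, verify each provided signature over the canonical JSON, and accept once signs_required of them check out. As a standalone sketch (enough_valid_signs and the injected verify callable are illustrative; in ZeroNet the callable is CryptBitcoin.verify):

def enough_valid_signs(sign_content, signs, valid_signers, signs_required, verify):
    # `signs` maps signer address -> signature over `sign_content`;
    # `verify(data, address, sign)` returns True for a valid signature.
    valid = 0
    for address in valid_signers:
        if address in signs and verify(sign_content, address, signs[address]):
            valid += 1
            if valid >= signs_required:
                break  # Enough signatures, no need to check the rest
    return valid >= signs_required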
ContentManager(site) + print "Loaded contents:", content_manager.contents.keys() + + file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")) + print "content.json valid:", content_manager.verifyFile( + "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False + ) + + file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json")) + print "messages.json valid:", content_manager.verifyFile( + "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False + ) + + def testInfo(): + from Site import Site + site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") + + content_manager = ContentManager(site) + print content_manager.contents.keys() + + print content_manager.getFileInfo("index.html") + print content_manager.getIncludeInfo("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") + print content_manager.getValidSigners("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") + print content_manager.getValidSigners("data/users/content.json") + print content_manager.getValidSigners("content.json") + + import sys + import logging + os.chdir("../..") + sys.path.insert(0, os.path.abspath(".")) + sys.path.insert(0, os.path.abspath("src")) + logging.basicConfig(level=logging.DEBUG) + + # testSign() + testVerify() + # testInfo() diff --git a/src/Content/__init__.py b/src/Content/__init__.py index fbbd39f4..fab39f93 100644 --- a/src/Content/__init__.py +++ b/src/Content/__init__.py @@ -1 +1 @@ -from .ContentManager import ContentManager \ No newline at end of file +from ContentManager import ContentManager \ No newline at end of file diff --git a/src/Crypt/Crypt.py b/src/Crypt/Crypt.py deleted file mode 100644 index 7d7d3659..00000000 --- a/src/Crypt/Crypt.py +++ /dev/null @@ -1,4 +0,0 @@ -from Config import config -from util import ThreadPool - -thread_pool_crypt = ThreadPool.ThreadPool(config.threads_crypt) \ No newline at end of file diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py index 68b2caa2..15d76d5f 100644 --- a/src/Crypt/CryptBitcoin.py +++ b/src/Crypt/CryptBitcoin.py @@ -1,101 +1,75 @@ import logging -import base64 -import binascii -import time -import hashlib -from util.Electrum import dbl_format +from lib.BitcoinECC import BitcoinECC +from lib.pybitcointools import bitcoin as btctools from Config import config -import util.OpensslFindPatch - -lib_verify_best = "sslcrypto" - -from lib import sslcrypto -sslcurve_native = sslcrypto.ecc.get_curve("secp256k1") -sslcurve_fallback = sslcrypto.fallback.ecc.get_curve("secp256k1") -sslcurve = sslcurve_native - -def loadLib(lib_name, silent=False): - global sslcurve, libsecp256k1message, lib_verify_best - if lib_name == "libsecp256k1": - s = time.time() - from lib import libsecp256k1message - import coincurve - lib_verify_best = "libsecp256k1" - if not silent: - logging.info( - "Libsecpk256k1 loaded: %s in %.3fs" % - (type(coincurve._libsecp256k1.lib).__name__, time.time() - s) - ) - elif lib_name == "sslcrypto": - sslcurve = sslcurve_native - if sslcurve_native == sslcurve_fallback: - logging.warning("SSLCurve fallback loaded instead of native") - elif lib_name == "sslcrypto_fallback": - sslcurve = sslcurve_fallback - +# Try to load openssl try: - if not config.use_libsecp256k1: + if not config.use_openssl: raise Exception("Disabled by config") - loadLib("libsecp256k1") - lib_verify_best = "libsecp256k1" -except Exception as err: - logging.info("Libsecp256k1 load failed: %s" % err) + from 
lib.opensslVerify import opensslVerify + logging.info("OpenSSL loaded, version: %s" % opensslVerify.openssl_version) +except Exception, err: + logging.info("OpenSSL load failed: %s, falling back to slow bitcoin verify" % err) + opensslVerify = None -def newPrivatekey(): # Return new private key - return sslcurve.private_to_wif(sslcurve.new_private_key()).decode() +def newPrivatekey(uncompressed=True): # Return new private key + privatekey = btctools.encode_privkey(btctools.random_key(), "wif") + return privatekey def newSeed(): - return binascii.hexlify(sslcurve.new_private_key()).decode() + return btctools.random_key() def hdPrivatekey(seed, child): - # Too large child id could cause problems - privatekey_bin = sslcurve.derive_child(seed.encode(), child % 100000000) - return sslcurve.private_to_wif(privatekey_bin).decode() + masterkey = btctools.bip32_master_key(seed) + childkey = btctools.bip32_ckd(masterkey, child % 100000000) # Too large child id could cause problems + key = btctools.bip32_extract_key(childkey) + return btctools.encode_privkey(key, "wif") def privatekeyToAddress(privatekey): # Return address from private key - try: - if len(privatekey) == 64: - privatekey_bin = bytes.fromhex(privatekey) - else: - privatekey_bin = sslcurve.wif_to_private(privatekey.encode()) - return sslcurve.private_to_address(privatekey_bin).decode() - except Exception: # Invalid privatekey - return False + if privatekey.startswith("23") and len(privatekey) > 52: # Backward compatibility to broken lib + bitcoin = BitcoinECC.Bitcoin() + bitcoin.BitcoinAddressFromPrivate(privatekey) + return bitcoin.BitcoinAddresFromPublicKey() + else: + try: + return btctools.privkey_to_address(privatekey) + except Exception: # Invalid privatekey + return False def sign(data, privatekey): # Return sign to data using private key if privatekey.startswith("23") and len(privatekey) > 52: return None # Old style private key not supported - return base64.b64encode(sslcurve.sign( - data.encode(), - sslcurve.wif_to_private(privatekey.encode()), - recoverable=True, - hash=dbl_format - )).decode() + sign = btctools.ecdsa_sign(data, privatekey) + return sign -def verify(data, valid_address, sign, lib_verify=None): # Verify data using address and sign - if not lib_verify: - lib_verify = lib_verify_best +def signOld(data, privatekey): # Return sign to data using private key (backward compatible old style) + bitcoin = BitcoinECC.Bitcoin() + bitcoin.BitcoinAddressFromPrivate(privatekey) + sign = bitcoin.SignECDSA(data) + return sign - if not sign: - return False - if lib_verify == "libsecp256k1": - sign_address = libsecp256k1message.recover_address(data.encode("utf8"), sign).decode("utf8") - elif lib_verify in ("sslcrypto", "sslcrypto_fallback"): - publickey = sslcurve.recover(base64.b64decode(sign), data.encode(), hash=dbl_format) - sign_address = sslcurve.public_to_address(publickey).decode() - else: - raise Exception("No library enabled for signature verification") +def verify(data, address, sign): # Verify data using address and sign + if hasattr(sign, "endswith"): + if opensslVerify: # Use the faster method if available + pub = opensslVerify.getMessagePubkey(data, sign) + sign_address = btctools.pubtoaddr(pub) + else: # Use pure-python + pub = btctools.ecdsa_recover(data, sign) + sign_address = btctools.pubtoaddr(pub) - if type(valid_address) is list: # Any address in 
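Every backend in this file verifies the same way: recover the signer's address from the recoverable ECDSA signature and compare it with the expected address, or with any member of a list of allowed addresses. A backend-agnostic sketch (recover_address is a placeholder for the opensslVerify getMessagePubkey + pubtoaddr pair, btctools.ecdsa_recover, or the sslcrypto path shown above; it is not a real API):

def verify_by_recovery(data, valid_address, sign, recover_address):
    # `recover_address(data, sign)` must return the address that produced
    # the signature; verification succeeds when it matches what we expect.
    if not sign:
        return False
    sign_address = recover_address(data, sign)
    if type(valid_address) is list:  # Any address in the list is accepted
        return sign_address in valid_address
    return sign_address == valid_address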
the list + return sign_address in address + else: # One possible address + return sign_address == address + else: # Backward compatible old style + bitcoin = BitcoinECC.Bitcoin() + return bitcoin.VerifyMessageFromBitcoinAddress(address, data, sign) diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py index c0903e84..b30c7e49 100644 --- a/src/Crypt/CryptConnection.py +++ b/src/Crypt/CryptConnection.py @@ -3,80 +3,23 @@ import logging import os import ssl import hashlib -import random from Config import config +from util import SslPatch from util import helper class CryptConnectionManager: def __init__(self): - if config.openssl_bin_file: - self.openssl_bin = config.openssl_bin_file - elif sys.platform.startswith("win"): - self.openssl_bin = "tools\\openssl\\openssl.exe" - elif config.dist_type.startswith("bundle_linux"): - self.openssl_bin = "../runtime/bin/openssl" + # OpenSSL params + if sys.platform.startswith("win"): + self.openssl_bin = "src\\lib\\opensslVerify\\openssl.exe" else: self.openssl_bin = "openssl" - - self.context_client = None - self.context_server = None - - self.openssl_conf_template = "src/lib/openssl/openssl.cnf" - self.openssl_conf = config.data_dir + "/openssl.cnf" - - self.openssl_env = { - "OPENSSL_CONF": self.openssl_conf, - "RANDFILE": config.data_dir + "/openssl-rand.tmp" - } + self.openssl_env = {"OPENSSL_CONF": "src/lib/opensslVerify/openssl.cnf"} self.crypt_supported = [] # Supported cryptos - self.cacert_pem = config.data_dir + "/cacert-rsa.pem" - self.cakey_pem = config.data_dir + "/cakey-rsa.pem" - self.cert_pem = config.data_dir + "/cert-rsa.pem" - self.cert_csr = config.data_dir + "/cert-rsa.csr" - self.key_pem = config.data_dir + "/key-rsa.pem" - - self.log = logging.getLogger("CryptConnectionManager") - self.log.debug("Version: %s" % ssl.OPENSSL_VERSION) - - self.fakedomains = [ - "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com", - "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com", - "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp", - "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com", - "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com", - "nazwa.pl", "symantec.com" - ] - - def createSslContexts(self): - if self.context_server and self.context_client: - return False - ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:" - ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" - - if hasattr(ssl, "PROTOCOL_TLS"): - protocol = ssl.PROTOCOL_TLS - else: - protocol = ssl.PROTOCOL_TLSv1_2 - self.context_client = ssl.SSLContext(protocol) - self.context_client.check_hostname = False - self.context_client.verify_mode = ssl.CERT_NONE - - self.context_server = ssl.SSLContext(protocol) - self.context_server.load_cert_chain(self.cert_pem, self.key_pem) - - for ctx in (self.context_client, self.context_server): - ctx.set_ciphers(ciphers) - ctx.options |= ssl.OP_NO_COMPRESSION - try: - ctx.set_alpn_protocols(["h2", "http/1.1"]) - ctx.set_npn_protocols(["h2", "http/1.1"]) - except Exception: - pass - # Select crypt that supported by both sides # Return: Name of the crypto def selectCrypt(self, client_supported): @@ -89,14 +32,17 @@ class CryptConnectionManager: # Return: wrapped socket def wrapSocket(self, sock, crypt, server=False, cert_pin=None): if crypt == "tls-rsa": + ciphers = 
"ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:" + ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" if server: - sock_wrapped = self.context_server.wrap_socket(sock, server_side=True) + sock_wrapped = ssl.wrap_socket( + sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, + certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers) else: - sock_wrapped = self.context_client.wrap_socket(sock, server_hostname=random.choice(self.fakedomains)) + sock_wrapped = ssl.wrap_socket(sock, ciphers=ciphers) if cert_pin: cert_hash = hashlib.sha256(sock_wrapped.getpeercert(True)).hexdigest() - if cert_hash != cert_pin: - raise Exception("Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin)) + assert cert_hash == cert_pin, "Socket certificate does not match (%s != %s)" % (cert_hash, cert_pin) return sock_wrapped else: return sock @@ -104,7 +50,7 @@ class CryptConnectionManager: def removeCerts(self): if config.keep_ssl_cert: return False - for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr", "openssl-rand.tmp"]: + for file_name in ["cert-rsa.pem", "key-rsa.pem"]: file_path = "%s/%s" % (config.data_dir, file_name) if os.path.isfile(file_path): os.unlink(file_path) @@ -114,108 +60,69 @@ class CryptConnectionManager: if config.disable_encryption: return False - if self.createSslRsaCert() and "tls-rsa" not in self.crypt_supported: + if self.createSslRsaCert(): self.crypt_supported.append("tls-rsa") # Try to create RSA server cert + sign for connection encryption # Return: True on success def createSslRsaCert(self): - casubjects = [ - "/C=US/O=Amazon/OU=Server CA 1B/CN=Amazon", - "/C=US/O=Let's Encrypt/CN=Let's Encrypt Authority X3", - "/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA", - "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA" - ] - self.openssl_env['CN'] = random.choice(self.fakedomains) - environ = os.environ - environ['OPENSSL_CONF'] = self.openssl_env['OPENSSL_CONF'] - environ['RANDFILE'] = self.openssl_env['RANDFILE'] - environ['CN'] = self.openssl_env['CN'] - - if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): - self.createSslContexts() + if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir): return True # Files already exits import subprocess - - # Replace variables in config template - conf_template = open(self.openssl_conf_template).read() - conf_template = conf_template.replace("$ENV::CN", self.openssl_env['CN']) - open(self.openssl_conf, "w").write(conf_template) - - # Generate CAcert and CAkey - cmd_params = helper.shellquote( + cmd = "%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s -out %s -nodes -config %s" % helper.shellquote( self.openssl_bin, - self.openssl_conf, - random.choice(casubjects), - self.cakey_pem, - self.cacert_pem + config.data_dir+"/key-rsa.pem", + config.data_dir+"/cert-rsa.pem", + self.openssl_env["OPENSSL_CONF"] ) - cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -config %s -subj %s -keyout %s -out %s -batch" % cmd_params - self.log.debug("Generating RSA CAcert and CAkey PEM files...") - self.log.debug("Running: %s" % cmd) proc = subprocess.Popen( - cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ + cmd.encode(sys.getfilesystemencoding()), + shell=True, stderr=subprocess.STDOUT, 
stdout=subprocess.PIPE, env=self.openssl_env ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") + back = proc.stdout.read().strip() proc.wait() + logging.debug("Generating RSA cert and key PEM files...%s" % back) - if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)): - self.log.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist. (%s)" % back) - return False - else: - self.log.debug("Result: %s" % back) - - # Generate certificate key and signing request - cmd_params = helper.shellquote( - self.openssl_bin, - self.key_pem, - self.cert_csr, - "/CN=" + self.openssl_env['CN'], - self.openssl_conf, - ) - cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % cmd_params - self.log.debug("Generating certificate key and signing request...") - proc = subprocess.Popen( - cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ - ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") - proc.wait() - self.log.debug("Running: %s\n%s" % (cmd, back)) - - # Sign request and generate certificate - cmd_params = helper.shellquote( - self.openssl_bin, - self.cert_csr, - self.cacert_pem, - self.cakey_pem, - self.cert_pem, - self.openssl_conf - ) - cmd = "%s x509 -req -in %s -CA %s -CAkey %s -set_serial 01 -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % cmd_params - self.log.debug("Generating RSA cert...") - proc = subprocess.Popen( - cmd, shell=True, stderr=subprocess.STDOUT, - stdout=subprocess.PIPE, env=environ - ) - back = proc.stdout.read().strip().decode(errors="replace").replace("\r", "") - proc.wait() - self.log.debug("Running: %s\n%s" % (cmd, back)) - - if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): - self.createSslContexts() - - # Remove no longer necessary files - os.unlink(self.openssl_conf) - os.unlink(self.cacert_pem) - os.unlink(self.cakey_pem) - os.unlink(self.cert_csr) - + if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir): return True else: - self.log.error("RSA ECC SSL cert generation failed, cert or key files not exist.") + logging.error("RSA ECC SSL cert generation failed, cert or key files not exist.") + return False + # Not used yet: Missing on some platform + """def createSslEccCert(self): + return False + import subprocess + + # Create ECC privatekey + proc = subprocess.Popen( + "%s ecparam -name prime256v1 -genkey -out %s/key-ecc.pem" % (self.openssl_bin, config.data_dir), + shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env + ) + back = proc.stdout.read().strip() + proc.wait() + self.log.debug("Generating ECC privatekey PEM file...%s" % back) + + # Create ECC cert + proc = subprocess.Popen( + "%s req -new -key %s -x509 -nodes -out %s -config %s" % helper.shellquote( + self.openssl_bin, + config.data_dir+"/key-ecc.pem", + config.data_dir+"/cert-ecc.pem", + self.openssl_env["OPENSSL_CONF"] + ), + shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env + ) + back = proc.stdout.read().strip() + proc.wait() + self.log.debug("Generating ECC cert PEM file...%s" % back) + + if os.path.isfile("%s/cert-ecc.pem" % config.data_dir) and os.path.isfile("%s/key-ecc.pem" % config.data_dir): + return True + else: + self.logging.error("ECC SSL cert generation failed, cert or key files not exits.") + return False + """ manager = CryptConnectionManager() diff --git 
a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py index f5901fb8..fb0c2dab 100644 --- a/src/Crypt/CryptHash.py +++ b/src/Crypt/CryptHash.py @@ -3,25 +3,29 @@ import os import base64 -def sha512sum(file, blocksize=65536, format="hexdigest"): - if type(file) is str: # Filename specified +def sha1sum(file, blocksize=65536): + if hasattr(file, "endswith"): # Its a string open it + file = open(file, "rb") + hash = hashlib.sha1() + for block in iter(lambda: file.read(blocksize), ""): + hash.update(block) + return hash.hexdigest() + + +def sha512sum(file, blocksize=65536): + if hasattr(file, "endswith"): # Its a string open it file = open(file, "rb") hash = hashlib.sha512() - for block in iter(lambda: file.read(blocksize), b""): + for block in iter(lambda: file.read(blocksize), ""): hash.update(block) - - # Truncate to 256bits is good enough - if format == "hexdigest": - return hash.hexdigest()[0:64] - else: - return hash.digest()[0:32] + return hash.hexdigest()[0:64] # Truncate to 256bits is good enough def sha256sum(file, blocksize=65536): - if type(file) is str: # Filename specified + if hasattr(file, "endswith"): # Its a string open it file = open(file, "rb") hash = hashlib.sha256() - for block in iter(lambda: file.read(blocksize), b""): + for block in iter(lambda: file.read(blocksize), ""): hash.update(block) return hash.hexdigest() @@ -29,28 +33,25 @@ def sha256sum(file, blocksize=65536): def random(length=64, encoding="hex"): if encoding == "base64": # Characters: A-Za-z0-9 hash = hashlib.sha512(os.urandom(256)).digest() - return base64.b64encode(hash).decode("ascii").replace("+", "").replace("/", "").replace("=", "")[0:length] + return base64.standard_b64encode(hash).replace("+", "").replace("/", "").replace("=", "")[0:length] else: # Characters: a-f0-9 (faster) return hashlib.sha512(os.urandom(256)).hexdigest()[0:length] -# Sha512 truncated to 256bits -class Sha512t: - def __init__(self, data): - if data: - self.sha512 = hashlib.sha512(data) - else: - self.sha512 = hashlib.sha512() - def hexdigest(self): - return self.sha512.hexdigest()[0:64] +if __name__ == "__main__": + import cStringIO as StringIO + a = StringIO.StringIO() + a.write("hello!") + a.seek(0) + print hashlib.sha1("hello!").hexdigest() + print sha1sum(a) - def digest(self): - return self.sha512.digest()[0:32] + import time + s = time.time() + print sha1sum(open("F:\\Temp\\bigfile")), + print time.time() - s - def update(self, data): - return self.sha512.update(data) - - -def sha512t(data=None): - return Sha512t(data) + s = time.time() + print sha512sum(open("F:\\Temp\\bigfile")), + print time.time() - s diff --git a/src/Crypt/CryptRsa.py b/src/Crypt/CryptRsa.py new file mode 100644 index 00000000..694ef34f --- /dev/null +++ b/src/Crypt/CryptRsa.py @@ -0,0 +1,38 @@ +import base64 +import hashlib + +def sign(data, privatekey): + from lib import rsa + from lib.rsa import pkcs1 + + if "BEGIN RSA PRIVATE KEY" not in privatekey: + privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey + + priv = rsa.PrivateKey.load_pkcs1(privatekey) + sign = rsa.pkcs1.sign(data, priv, 'SHA-256') + return sign + +def verify(data, publickey, sign): + from lib import rsa + from lib.rsa import pkcs1 + + pub = rsa.PublicKey.load_pkcs1(publickey, format="DER") + try: + valid = rsa.pkcs1.verify(data, sign, pub) + except pkcs1.VerificationError: + valid = False + return valid + +def privatekeyToPublickey(privatekey): + from lib import rsa + from lib.rsa import pkcs1 + + if "BEGIN RSA PRIVATE KEY" not in privatekey: 
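All three checksum helpers in the CryptHash.py hunk follow one streaming pattern: read the file in fixed-size blocks, feed each block to the hash, and, for sha512sum, truncate the digest to 256 bits (64 hex characters). A Python 3 sketch of that pattern (the helper name sha512t_file is ours); note the b"" sentinel, the py3 detail this hunk swaps back to "":

    import hashlib

    def sha512t_file(path, blocksize=65536):
        # Hash in fixed-size blocks so large files never load fully into memory
        h = hashlib.sha512()
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(blocksize), b""):
                h.update(block)
        return h.hexdigest()[:64]  # SHA-512 truncated to 256 bits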
+ privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey + + priv = rsa.PrivateKey.load_pkcs1(privatekey) + pub = rsa.PublicKey(priv.n, priv.e) + return pub.save_pkcs1("DER") + +def publickeyToOnion(publickey): + return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower() diff --git a/src/Crypt/CryptTor.py b/src/Crypt/CryptTor.py deleted file mode 100644 index 78ba6fc2..00000000 --- a/src/Crypt/CryptTor.py +++ /dev/null @@ -1,85 +0,0 @@ -import base64 -import hashlib - -def sign(data, privatekey): - import rsa - from rsa import pkcs1 - from lib import Ed25519 - - ## Onion Service V3 - if len(privatekey) == 88: - prv_key = base64.b64decode(privatekey) - pub_key = Ed25519.publickey_unsafe(prv_key) - sign = Ed25519.signature_unsafe(data, prv_key, pub_key) - - return sign - - ## Onion Service V2 - if "BEGIN RSA PRIVATE KEY" not in privatekey: - privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey - - priv = rsa.PrivateKey.load_pkcs1(privatekey) - sign = rsa.pkcs1.sign(data, priv, 'SHA-256') - return sign - -def verify(data, publickey, sign): - import rsa - from rsa import pkcs1 - from lib import Ed25519 - - ## Onion Service V3 - if len(publickey) == 32: - - try: - valid = Ed25519.checkvalid(sign, data, publickey) - valid = 'SHA-256' - - except Exception as err: - print(err) - valid = False - - return valid - - ## Onion Service V2 - pub = rsa.PublicKey.load_pkcs1(publickey, format="DER") - - try: - valid = rsa.pkcs1.verify(data, sign, pub) - - except pkcs1.VerificationError: - valid = False - - return valid - -def privatekeyToPublickey(privatekey): - import rsa - from rsa import pkcs1 - from lib import Ed25519 - - ## Onion Service V3 - if len(privatekey) == 88: - prv_key = base64.b64decode(privatekey) - pub_key = Ed25519.publickey_unsafe(prv_key) - - return pub_key - - ## Onion Service V2 - if "BEGIN RSA PRIVATE KEY" not in privatekey: - privatekey = "-----BEGIN RSA PRIVATE KEY-----\n%s\n-----END RSA PRIVATE KEY-----" % privatekey - - priv = rsa.PrivateKey.load_pkcs1(privatekey) - pub = rsa.PublicKey(priv.n, priv.e) - - return pub.save_pkcs1("DER") - -def publickeyToOnion(publickey): - from lib import Ed25519 - - ## Onion Service V3 - if len(publickey) == 32: - addr = Ed25519.publickey_to_onionaddress(publickey)[:-6] - - return addr - - ## Onion Service V2 - return base64.b32encode(hashlib.sha1(publickey).digest()[:10]).lower().decode("ascii") diff --git a/src/Db/Db.py b/src/Db/Db.py index d1d9ce15..de711a21 100644 --- a/src/Db/Db.py +++ b/src/Db/Db.py @@ -4,24 +4,11 @@ import time import logging import re import os -import atexit -import threading -import sys -import weakref -import errno - import gevent -from Debug import Debug -from .DbCursor import DbCursor -from util import SafeRe -from util import helper -from util import ThreadPool +from DbCursor import DbCursor from Config import config -thread_pool_db = ThreadPool.ThreadPool(config.threads_db) - -next_db_id = 0 opened_dbs = [] @@ -31,161 +18,76 @@ def dbCleanup(): time.sleep(60 * 5) for db in opened_dbs[:]: idle = time.time() - db.last_query_time - if idle > 60 * 5 and db.close_idle: - db.close("Cleanup") - - -def dbCommitCheck(): - while 1: - time.sleep(5) - for db in opened_dbs[:]: - if not db.need_commit: - continue - - success = db.commit("Interval") - if success: - db.need_commit = False - time.sleep(0.1) - - -def dbCloseAll(): - for db in opened_dbs[:]: - db.close("Close all") - + if idle > 60 * 5: + db.close() gevent.spawn(dbCleanup) 
-gevent.spawn(dbCommitCheck) -atexit.register(dbCloseAll) - - -class DbTableError(Exception): - def __init__(self, message, table): - super().__init__(message) - self.table = table class Db(object): - def __init__(self, schema, db_path, close_idle=False): - global next_db_id + def __init__(self, schema, db_path): self.db_path = db_path self.db_dir = os.path.dirname(db_path) + "/" self.schema = schema self.schema["version"] = self.schema.get("version", 1) self.conn = None self.cur = None - self.cursors = weakref.WeakSet() - self.id = next_db_id - next_db_id += 1 - self.progress_sleeping = False - self.commiting = False - self.log = logging.getLogger("Db#%s:%s" % (self.id, schema["db_name"])) + self.log = logging.getLogger("Db:%s" % schema["db_name"]) self.table_names = None self.collect_stats = False self.foreign_keys = False - self.need_commit = False self.query_stats = {} self.db_keyvalues = {} self.delayed_queue = [] self.delayed_queue_thread = None - self.close_idle = close_idle self.last_query_time = time.time() - self.last_sleep_time = time.time() - self.num_execute_since_sleep = 0 - self.lock = ThreadPool.Lock() - self.connect_lock = ThreadPool.Lock() def __repr__(self): - return "" % (id(self), self.db_path, self.close_idle) + return "" % self.db_path def connect(self): - self.connect_lock.acquire(True) - try: - if self.conn: - self.log.debug("Already connected, connection ignored") - return - - if self not in opened_dbs: - opened_dbs.append(self) - s = time.time() - try: # Directory not exist yet - os.makedirs(self.db_dir) - self.log.debug("Created Db path: %s" % self.db_dir) - except OSError as err: - if err.errno != errno.EEXIST: - raise err - if not os.path.isfile(self.db_path): - self.log.debug("Db file not exist yet: %s" % self.db_path) - self.conn = sqlite3.connect(self.db_path, isolation_level="DEFERRED", check_same_thread=False) - self.conn.row_factory = sqlite3.Row - self.conn.set_progress_handler(self.progress, 5000000) - self.conn.execute('PRAGMA journal_mode=WAL') - if self.foreign_keys: - self.conn.execute("PRAGMA foreign_keys = ON") - self.cur = self.getCursor() - - self.log.debug( - "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." % - (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) - ) - self.log.debug("Connect by thread: %s" % threading.current_thread().ident) - self.log.debug("Connect called by %s" % Debug.formatStack()) - finally: - self.connect_lock.release() - - def getConn(self): - if not self.conn: - self.connect() - return self.conn - - def progress(self, *args, **kwargs): - self.progress_sleeping = True - time.sleep(0.001) - self.progress_sleeping = False + if self not in opened_dbs: + opened_dbs.append(self) + s = time.time() + if not os.path.isdir(self.db_dir): # Directory not exist yet + os.makedirs(self.db_dir) + self.log.debug("Created Db path: %s" % self.db_dir) + if not os.path.isfile(self.db_path): + self.log.debug("Db file not exist yet: %s" % self.db_path) + self.conn = sqlite3.connect(self.db_path, check_same_thread=False) + self.conn.row_factory = sqlite3.Row + self.conn.isolation_level = None + self.cur = self.getCursor() + if config.db_mode == "security": + self.cur.execute("PRAGMA journal_mode = WAL") + self.cur.execute("PRAGMA synchronous = NORMAL") + else: + self.cur.execute("PRAGMA journal_mode = MEMORY") + self.cur.execute("PRAGMA synchronous = OFF") + if self.foreign_keys: + self.execute("PRAGMA foreign_keys = ON") + self.log.debug( + "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." 
% + (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) + ) # Execute query using dbcursor def execute(self, query, params=None): + self.last_query_time = time.time() if not self.conn: self.connect() return self.cur.execute(query, params) - @thread_pool_db.wrap - def commit(self, reason="Unknown"): - if self.progress_sleeping: - self.log.debug("Commit ignored: Progress sleeping") - return False - - if not self.conn: - self.log.debug("Commit ignored: No connection") - return False - - if self.commiting: - self.log.debug("Commit ignored: Already commiting") - return False - - try: - s = time.time() - self.commiting = True - self.conn.commit() - self.log.debug("Commited in %.3fs (reason: %s)" % (time.time() - s, reason)) - return True - except Exception as err: - if "SQL statements in progress" in str(err): - self.log.warning("Commit delayed: %s (reason: %s)" % (Debug.formatException(err), reason)) - else: - self.log.error("Commit error: %s (reason: %s)" % (Debug.formatException(err), reason)) - return False - finally: - self.commiting = False - def insertOrUpdate(self, *args, **kwargs): + self.last_query_time = time.time() if not self.conn: self.connect() return self.cur.insertOrUpdate(*args, **kwargs) def executeDelayed(self, *args, **kwargs): if not self.delayed_queue_thread: - self.delayed_queue_thread = gevent.spawn_later(1, self.processDelayed) + self.delayed_queue_thread = gevent.spawn_later(10, self.processDelayed) self.delayed_queue.append(("execute", (args, kwargs))) def insertOrUpdateDelayed(self, *args, **kwargs): @@ -197,66 +99,45 @@ class Db(object): if not self.delayed_queue: self.log.debug("processDelayed aborted") return + self.last_query_time = time.time() if not self.conn: self.connect() s = time.time() cur = self.getCursor() + cur.execute("BEGIN") for command, params in self.delayed_queue: if command == "insertOrUpdate": cur.insertOrUpdate(*params[0], **params[1]) else: cur.execute(*params[0], **params[1]) + cur.execute("END") if len(self.delayed_queue) > 10: self.log.debug("Processed %s delayed queue in %.3fs" % (len(self.delayed_queue), time.time() - s)) self.delayed_queue = [] self.delayed_queue_thread = None - def close(self, reason="Unknown"): - if not self.conn: - return False - self.connect_lock.acquire() + def close(self): s = time.time() if self.delayed_queue: self.processDelayed() if self in opened_dbs: opened_dbs.remove(self) - self.need_commit = False - self.commit("Closing: %s" % reason) - self.log.debug("Close called by %s" % Debug.formatStack()) - for i in range(5): - if len(self.cursors) == 0: - break - self.log.debug("Pending cursors: %s" % len(self.cursors)) - time.sleep(0.1 * i) - if len(self.cursors): - self.log.debug("Killing cursors: %s" % len(self.cursors)) - self.conn.interrupt() - if self.cur: self.cur.close() if self.conn: - ThreadPool.main_loop.call(self.conn.close) + self.conn.close() self.conn = None self.cur = None - self.log.debug("%s closed (reason: %s) in %.3fs, opened: %s" % (self.db_path, reason, time.time() - s, len(opened_dbs))) - self.connect_lock.release() - return True + self.log.debug("%s closed in %.3fs, opened: %s" % (self.db_path, time.time() - s, len(opened_dbs))) # Gets a cursor object to database # Return: Cursor class def getCursor(self): if not self.conn: self.connect() - - cur = DbCursor(self) - return cur - - def getSharedCursor(self): - if not self.conn: - self.connect() - return self.cur + return DbCursor(self.conn, self) # Get the table version # Return: Table version or None if not exist @@ -264,8 +145,8 
@@ class Db(object): if not self.db_keyvalues: # Get db keyvalues try: res = self.execute("SELECT * FROM keyvalue WHERE json_id=0") # json_id = 0 is internal keyvalues - except sqlite3.OperationalError as err: # Table not exist - self.log.debug("Query table version error: %s" % err) + except sqlite3.OperationalError, err: # Table not exist + self.log.debug("Query error: %s" % err) return False for row in res: @@ -278,8 +159,9 @@ class Db(object): def checkTables(self): s = time.time() changed_tables = [] + cur = self.getCursor() - cur = self.getSharedCursor() + cur.execute("BEGIN") # Check internal tables # Check keyvalue table @@ -294,50 +176,44 @@ class Db(object): if changed: changed_tables.append("keyvalue") - # Create json table if no custom one defined - if "json" not in self.schema.get("tables", {}): - if self.schema["version"] == 1: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["path", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(path)" - ], version=self.schema["version"]) - elif self.schema["version"] == 2: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["directory", "VARCHAR(255)"], - ["file_name", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(directory, file_name)" - ], version=self.schema["version"]) - elif self.schema["version"] == 3: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["site", "VARCHAR(255)"], - ["directory", "VARCHAR(255)"], - ["file_name", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(directory, site, file_name)" - ], version=self.schema["version"]) - if changed: - changed_tables.append("json") + # Check json table + if self.schema["version"] == 1: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["path", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(path)" + ], version=self.schema["version"]) + elif self.schema["version"] == 2: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["directory", "VARCHAR(255)"], + ["file_name", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(directory, file_name)" + ], version=self.schema["version"]) + elif self.schema["version"] == 3: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["site", "VARCHAR(255)"], + ["directory", "VARCHAR(255)"], + ["file_name", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(directory, site, file_name)" + ], version=self.schema["version"]) + if changed: + changed_tables.append("json") # Check schema tables - for table_name, table_settings in self.schema.get("tables", {}).items(): - try: - indexes = table_settings.get("indexes", []) - version = table_settings.get("schema_changed", 0) - changed = cur.needTable( - table_name, table_settings["cols"], - indexes, version=version - ) - if changed: - changed_tables.append(table_name) - except Exception as err: - self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err))) - raise DbTableError(err, table_name) + for table_name, table_settings in self.schema["tables"].items(): + changed = cur.needTable( + table_name, table_settings["cols"], + table_settings["indexes"], version=table_settings["schema_changed"] + ) + if changed: + changed_tables.append(table_name) + cur.execute("COMMIT") self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables)) if changed_tables: 
self.db_keyvalues = {} # Refresh table version cache @@ -349,16 +225,12 @@ class Db(object): def updateJson(self, file_path, file=None, cur=None): if not file_path.startswith(self.db_dir): return False # Not from the db dir: Skipping - relative_path = file_path[len(self.db_dir):] # File path realative to db file - + relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file # Check if filename matches any of mappings in schema matched_maps = [] for match, map_settings in self.schema["maps"].items(): - try: - if SafeRe.match(match, relative_path): - matched_maps.append(map_settings) - except SafeRe.UnsafePatternError as err: - self.log.error(err) + if re.match(match, relative_path): + matched_maps.append(map_settings) # No match found for the file if not matched_maps: @@ -367,29 +239,27 @@ class Db(object): # Load the json file try: if file is None: # Open file is not file object passed - file = open(file_path, "rb") + file = open(file_path) if file is False: # File deleted data = {} else: - if file_path.endswith("json.gz"): - file = helper.limitedGzipFile(fileobj=file) - - if sys.version_info.major == 3 and sys.version_info.minor < 6: - data = json.loads(file.read().decode("utf8")) - else: - data = json.load(file) - except Exception as err: + data = json.load(file) + except Exception, err: self.log.debug("Json file %s load error: %s" % (file_path, err)) data = {} # No cursor specificed if not cur: - cur = self.getSharedCursor() + cur = self.getCursor() + cur.execute("BEGIN") cur.logging = False + commit_after_done = True + else: + commit_after_done = False # Row for current json file if required - if not data or [dbmap for dbmap in matched_maps if "to_keyvalue" in dbmap or "to_table" in dbmap]: + if not data or filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps): json_row = cur.getJsonRow(relative_path) # Check matched mappings in schema @@ -426,7 +296,7 @@ class Db(object): changed = True if changed: # Add the custom col values - data_json_row.update({key: val for key, val in data.items() if key in dbmap["to_json_table"]}) + data_json_row.update({key: val for key, val in data.iteritems() if key in dbmap["to_json_table"]}) cur.execute("INSERT OR REPLACE INTO json ?", data_json_row) # Insert data to tables @@ -448,7 +318,7 @@ class Db(object): # Fill import cols from table cols if not import_cols: - import_cols = set([item[0] for item in self.schema["tables"][table_name]["cols"]]) + import_cols = set(map(lambda item: item[0], self.schema["tables"][table_name]["cols"])) cur.execute("DELETE FROM %s WHERE json_id = ?" % table_name, (json_row["json_id"],)) @@ -456,28 +326,28 @@ class Db(object): continue if key_col: # Map as dict - for key, val in data[node].items(): + for key, val in data[node].iteritems(): if val_col: # Single value cur.execute( "INSERT OR REPLACE INTO %s ?" 
% table_name, {key_col: key, val_col: val, "json_id": json_row["json_id"]} ) else: # Multi value - if type(val) is dict: # Single row + if isinstance(val, dict): # Single row row = val if import_cols: row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols row[key_col] = key # Replace in value if necessary if replaces: - for replace_key, replace in replaces.items(): + for replace_key, replace in replaces.iteritems(): if replace_key in row: - for replace_from, replace_to in replace.items(): + for replace_from, replace_to in replace.iteritems(): row[replace_key] = row[replace_key].replace(replace_from, replace_to) row["json_id"] = json_row["json_id"] cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row) - elif type(val) is list: # Multi row + else: # Multi row for row in val: row[key_col] = key row["json_id"] = json_row["json_id"] @@ -494,6 +364,8 @@ class Db(object): self.log.debug("Cleanup json row for %s" % file_path) cur.execute("DELETE FROM json WHERE json_id = %s" % json_row["json_id"]) + if commit_after_done: + cur.execute("COMMIT") return True @@ -507,6 +379,7 @@ if __name__ == "__main__": dbjson.collect_stats = True dbjson.checkTables() cur = dbjson.getCursor() + cur.execute("BEGIN") cur.logging = False dbjson.updateJson("data/users/content.json", cur=cur) for user_dir in os.listdir("data/users"): @@ -514,6 +387,7 @@ if __name__ == "__main__": dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur) # print ".", cur.logging = True - print("Done in %.3fs" % (time.time() - s)) + cur.execute("COMMIT") + print "Done in %.3fs" % (time.time() - s) for query, stats in sorted(dbjson.query_stats.items()): - print("-", query, stats) + print "-", query, stats diff --git a/src/Db/DbCursor.py b/src/Db/DbCursor.py index acb8846d..728335d9 100644 --- a/src/Db/DbCursor.py +++ b/src/Db/DbCursor.py @@ -1,63 +1,38 @@ import time import re -from util import helper # Special sqlite cursor class DbCursor: - def __init__(self, db): + def __init__(self, conn, db): + self.conn = conn self.db = db + self.cursor = conn.cursor() self.logging = False - def quoteValue(self, value): - if type(value) is int: - return str(value) - else: - return "'%s'" % value.replace("'", "''") - - def parseQuery(self, query, params): - query_type = query.split(" ", 1)[0].upper() + def execute(self, query, params=None): if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params - if query_type in ("SELECT", "DELETE", "UPDATE"): + if query.startswith("SELECT") or query.startswith("DELETE") or query.startswith("UPDATE"): # Convert param dict to SELECT * FROM table WHERE key = ? AND key2 = ? 
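Both versions of DbCursor.execute() in this hunk accept a dict in place of positional parameters and rewrite the query's trailing ? into a WHERE clause: equality for scalars, IN (...) for lists, with the values collected for ordinary parameter binding. The core transformation as a standalone helper (expand_where is our name; the not__ prefix and comparison-suffix variants are left out for brevity):

    import re

    def expand_where(query, params):
        # Turn {"path": p, "json_id": [1, 2]} into "path = ? AND json_id IN (?,?)"
        wheres, values = [], []
        for key, value in params.items():
            if isinstance(value, list):
                wheres.append("%s IN (%s)" % (key, ",".join("?" * len(value))))
                values.extend(value)
            else:
                wheres.append("%s = ?" % key)
                values.append(value)
        query = re.sub(r"(.*)\?", r"\1" + " AND ".join(wheres), query)  # Replace the last ?
        return query, values

For example, expand_where("SELECT * FROM json WHERE ?", {"path": "data.json", "json_id": [1, 2]}) returns ("SELECT * FROM json WHERE path = ? AND json_id IN (?,?)", ["data.json", 1, 2]).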
format query_wheres = [] values = [] for key, value in params.items(): if type(value) is list: if key.startswith("not__"): - field = key.replace("not__", "") - operator = "NOT IN" + query_wheres.append(key.replace("not__", "") + " NOT IN (" + ",".join(["?"] * len(value)) + ")") else: - field = key - operator = "IN" - if len(value) > 100: - # Embed values in query to avoid "too many SQL variables" error - query_values = ",".join(map(helper.sqlquote, value)) - else: - query_values = ",".join(["?"] * len(value)) - values += value - query_wheres.append( - "%s %s (%s)" % - (field, operator, query_values) - ) + query_wheres.append(key + " IN (" + ",".join(["?"] * len(value)) + ")") + values += value else: if key.startswith("not__"): query_wheres.append(key.replace("not__", "") + " != ?") - elif key.endswith("__like"): - query_wheres.append(key.replace("__like", "") + " LIKE ?") - elif key.endswith(">"): - query_wheres.append(key.replace(">", "") + " > ?") - elif key.endswith("<"): - query_wheres.append(key.replace("<", "") + " < ?") else: query_wheres.append(key + " = ?") values.append(value) wheres = " AND ".join(query_wheres) - if wheres == "": - wheres = "1" - query = re.sub("(.*)[?]", "\\1 %s" % wheres, query) # Replace the last ? + query = re.sub("(.*)[?]", "\\1%s" % wheres, query) # Replace the last ? params = values else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format @@ -66,54 +41,18 @@ class DbCursor: keysvalues = "(%s) VALUES (%s)" % (keys, values) query = re.sub("(.*)[?]", "\\1%s" % keysvalues, query) # Replace the last ? params = tuple(params.values()) - elif isinstance(params, dict) and ":" in query: - new_params = dict() - values = [] - for key, value in params.items(): - if type(value) is list: - for idx, val in enumerate(value): - new_params[key + "__" + str(idx)] = val - new_names = [":" + key + "__" + str(idx) for idx in range(len(value))] - query = re.sub(r":" + re.escape(key) + r"([)\s]|$)", "(%s)%s" % (", ".join(new_names), r"\1"), query) - else: - new_params[key] = value + s = time.time() + # if query == "COMMIT": self.logging = True # Turn logging back on transaction commit - params = new_params - return query, params - - def execute(self, query, params=None): - query = query.strip() - while self.db.progress_sleeping or self.db.commiting: - time.sleep(0.1) - - self.db.last_query_time = time.time() - - query, params = self.parseQuery(query, params) - - cursor = self.db.getConn().cursor() - self.db.cursors.add(cursor) - if self.db.lock.locked(): - self.db.log.debug("Locked for %.3fs" % (time.time() - self.db.lock.time_lock)) - - try: - s = time.time() - self.db.lock.acquire(True) - if query.upper().strip("; ") == "VACUUM": - self.db.commit("vacuum called") - if params: - res = cursor.execute(query, params) - else: - res = cursor.execute(query) - finally: - self.db.lock.release() - - taken_query = time.time() - s - if self.logging or taken_query > 1: - if params: # Query has parameters - self.db.log.debug("Query: " + query + " " + str(params) + " (Done in %.4f)" % (time.time() - s)) - else: - self.db.log.debug("Query: " + query + " (Done in %.4f)" % (time.time() - s)) + if params: # Query has parameters + res = self.cursor.execute(query, params) + if self.logging: + self.db.log.debug(query + " " + str(params) + " (Done in %.4f)" % (time.time() - s)) + else: + res = self.cursor.execute(query) + if self.logging: + self.db.log.debug(query + " (Done in %.4f)" % (time.time() - s)) # Log query stats if self.db.collect_stats: @@ -122,39 +61,8 @@ class 
DbCursor: self.db.query_stats[query]["call"] += 1 self.db.query_stats[query]["time"] += time.time() - s - query_type = query.split(" ", 1)[0].upper() - is_update_query = query_type in ["UPDATE", "DELETE", "INSERT", "CREATE"] - if not self.db.need_commit and is_update_query: - self.db.need_commit = True - - if is_update_query: - return cursor - else: - return res - - def executemany(self, query, params): - while self.db.progress_sleeping or self.db.commiting: - time.sleep(0.1) - - self.db.last_query_time = time.time() - - s = time.time() - cursor = self.db.getConn().cursor() - self.db.cursors.add(cursor) - - try: - self.db.lock.acquire(True) - cursor.executemany(query, params) - finally: - self.db.lock.release() - - taken_query = time.time() - s - if self.logging or taken_query > 0.1: - self.db.log.debug("Execute many: %s (Done in %.4f)" % (query, taken_query)) - - self.db.need_commit = True - - return cursor + # if query == "BEGIN": self.logging = False # Turn logging off on transaction commit + return res # Creates on updates a database row without incrementing the rowid def insertOrUpdate(self, table, query_sets, query_wheres, oninsert={}): @@ -163,11 +71,11 @@ class DbCursor: params = query_sets params.update(query_wheres) - res = self.execute( + self.cursor.execute( "UPDATE %s SET %s WHERE %s" % (table, ", ".join(sql_sets), " AND ".join(sql_wheres)), params ) - if res.rowcount == 0: + if self.cursor.rowcount == 0: params.update(oninsert) # Add insert-only fields self.execute("INSERT INTO %s ?" % table, params) @@ -186,10 +94,8 @@ class DbCursor: # Create indexes on table # Return: True on success def createIndexes(self, table, indexes): + # indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index for index in indexes: - if not index.strip().upper().startswith("CREATE"): - self.db.log.error("Index command should start with CREATE: %s" % index) - continue self.execute(index) # Create table if not exist @@ -197,7 +103,7 @@ class DbCursor: def needTable(self, table, cols, indexes=None, version=1): current_version = self.db.getTableVersion(table) if int(current_version) < int(version): # Table need update or not extis - self.db.log.debug("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) + self.db.log.info("Table %s outdated...version: %s need: %s, rebuilding..." % (table, current_version, version)) self.createTable(table, cols) if indexes: self.createIndexes(table, indexes) @@ -238,9 +144,7 @@ class DbCursor: self.execute("INSERT INTO json ?", {"site": site_address, "directory": directory, "file_name": file_name}) res = self.execute("SELECT * FROM json WHERE ? 
LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name}) row = res.fetchone() - else: - raise Exception("Dbschema version %s not supported" % self.db.schema.get("version")) return row def close(self): - pass + self.cursor.close() diff --git a/src/Db/DbQuery.py b/src/Db/DbQuery.py index 3fb5ef73..10ce773b 100644 --- a/src/Db/DbQuery.py +++ b/src/Db/DbQuery.py @@ -9,9 +9,9 @@ class DbQuery: # Split main parts of query def parseParts(self, query): parts = re.split("(SELECT|FROM|WHERE|ORDER BY|LIMIT)", query) - parts = [_f for _f in parts if _f] # Remove empty parts - parts = [s.strip() for s in parts] # Remove whitespace - return dict(list(zip(parts[0::2], parts[1::2]))) + parts = filter(None, parts) # Remove empty parts + parts = map(lambda s: s.strip(), parts) # Remove whitespace + return dict(zip(parts[0::2], parts[1::2])) # Parse selected fields SELECT ... FROM def parseFields(self, query_select): @@ -22,8 +22,6 @@ class DbQuery: def parseWheres(self, query_where): if " AND " in query_where: return query_where.split(" AND ") - elif query_where: - return [query_where] else: return [] diff --git a/src/Db/__init__.py b/src/Db/__init__.py index e69de29b..5bede9f4 100644 --- a/src/Db/__init__.py +++ b/src/Db/__init__.py @@ -0,0 +1,3 @@ +from Db import Db +from DbQuery import DbQuery +from DbCursor import DbCursor \ No newline at end of file diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py index 0ec42615..3d6b6394 100644 --- a/src/Debug/Debug.py +++ b/src/Debug/Debug.py @@ -1,172 +1,73 @@ import sys import os -import re from Config import config # Non fatal exception class Notify(Exception): - def __init__(self, message=None): - if message: - self.message = message + def __init__(self, message): + self.message = message def __str__(self): return self.message -# Gevent greenlet.kill accept Exception type -def createNotifyType(message): - return type("Notify", (Notify, ), {"message": message}) - - -def formatExceptionMessage(err): - err_type = err.__class__.__name__ - if err.args: - err_message = err.args[-1] - else: - err_message = err.__str__() - return "%s: %s" % (err_type, err_message) - - -python_lib_dirs = [path.replace("\\", "/") for path in sys.path if re.sub(r".*[\\/]", "", path) in ("site-packages", "dist-packages")] -python_lib_dirs.append(os.path.dirname(os.__file__).replace("\\", "/")) # TODO: check if returns the correct path for PyPy - -root_dir = os.path.realpath(os.path.dirname(__file__) + "/../../") -root_dir = root_dir.replace("\\", "/") - - -def formatTraceback(items, limit=None, fold_builtin=True): - back = [] - i = 0 - prev_file_title = "" - is_prev_builtin = False - - for path, line in items: - i += 1 - is_last = i == len(items) - path = path.replace("\\", "/") - - if path.startswith("src/gevent/"): - file_title = "/" + path[len("src/gevent/"):] - is_builtin = True - is_skippable_builtin = False - elif path in ("", ""): - file_title = "(importlib)" - is_builtin = True - is_skippable_builtin = True - else: - is_skippable_builtin = False - for base in python_lib_dirs: - if path.startswith(base + "/"): - file_title = path[len(base + "/"):] - module_name, *tail = file_title.split("/") - if module_name.endswith(".py"): - module_name = module_name[:-3] - file_title = "/".join(["<%s>" % module_name] + tail) - is_builtin = True - break - else: - is_builtin = False - for base in (root_dir + "/src", root_dir + "/plugins", root_dir): - if path.startswith(base + "/"): - file_title = path[len(base + "/"):] - break - else: - # For unknown paths, do our best 
to hide absolute path - file_title = path - for needle in ("/zeronet/", "/core/"): - if needle in file_title.lower(): - file_title = "?/" + file_title[file_title.lower().rindex(needle) + len(needle):] - - # Path compression: A/AB/ABC/X/Y.py -> ABC/X/Y.py - # E.g.: in 'Db/DbCursor.py' the directory part is unnecessary - if not file_title.startswith("/"): - prev_part = "" - for i, part in enumerate(file_title.split("/") + [""]): - if not part.startswith(prev_part): - break - prev_part = part - file_title = "/".join(file_title.split("/")[i - 1:]) - - if is_skippable_builtin and fold_builtin: - pass - elif is_builtin and is_prev_builtin and not is_last and fold_builtin: - if back[-1] != "...": - back.append("...") - else: - if file_title == prev_file_title: - back.append("%s" % line) - else: - back.append("%s line %s" % (file_title, line)) - - prev_file_title = file_title - is_prev_builtin = is_builtin - - if limit and i >= limit: - back.append("...") - break - return back - - def formatException(err=None, format="text"): import traceback if type(err) == Notify: return err - elif type(err) == tuple and err and err[0] is not None: # Passed trackeback info + elif type(err) == tuple and err[0] is not None: # Passed trackeback info exc_type, exc_obj, exc_tb = err err = None else: # No trackeback info passed, get latest exc_type, exc_obj, exc_tb = sys.exc_info() if not err: - if hasattr(err, "message"): - err = exc_obj.message - else: - err = exc_obj - - tb = formatTraceback([[frame[0], frame[1]] for frame in traceback.extract_tb(exc_tb)]) + err = exc_obj.message + tb = [] + for frame in traceback.extract_tb(exc_tb): + path, line, function, text = frame + file = os.path.split(path)[1] + tb.append("%s line %s" % (file, line)) if format == "html": - return "%s: %s
    %s" % (repr(err), err, " > ".join(tb)) + return "%s: %s
    %s" % (exc_type.__name__, err, " > ".join(tb)) else: return "%s: %s in %s" % (exc_type.__name__, err, " > ".join(tb)) -def formatStack(limit=None): +def formatStack(): import inspect - tb = formatTraceback([[frame[1], frame[2]] for frame in inspect.stack()[1:]], limit=limit) - return " > ".join(tb) + back = [] + for stack in inspect.stack(): + frame, path, line, function, source, index = stack + file = os.path.split(path)[1] + back.append("%s line %s" % (file, line)) + return " > ".join(back) # Test if gevent eventloop blocks -import logging -import gevent -import time +if config.debug_gevent: + import logging + import gevent + import time - -num_block = 0 - - -def testBlock(): - global num_block - logging.debug("Gevent block checker started") - last_time = time.time() - while 1: - time.sleep(1) - if time.time() - last_time > 1.1: - logging.debug("Gevent block detected: %.3fs" % (time.time() - last_time - 1)) - num_block += 1 + def testBlock(): + logging.debug("Gevent block checker started") last_time = time.time() - - -gevent.spawn(testBlock) + while 1: + time.sleep(1) + if time.time() - last_time > 1.1: + logging.debug("Gevent block detected: %s" % (time.time() - last_time - 1)) + last_time = time.time() + gevent.spawn(testBlock) if __name__ == "__main__": try: - print(1 / 0) - except Exception as err: - print(type(err).__name__) - print("1/0 error: %s" % formatException(err)) + print 1 / 0 + except Exception, err: + print type(err).__name__ + print "1/0 error: %s" % formatException(err) def loadJson(): json.loads("Errr") @@ -174,13 +75,13 @@ if __name__ == "__main__": import json try: loadJson() - except Exception as err: - print(err) - print("Json load error: %s" % formatException(err)) + except Exception, err: + print err + print "Json load error: %s" % formatException(err) try: raise Notify("nothing...") - except Exception as err: - print("Notify: %s" % formatException(err)) + except Exception, err: + print "Notify: %s" % formatException(err) loadJson() diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py index d100a3b8..a95d9719 100644 --- a/src/Debug/DebugHook.py +++ b/src/Debug/DebugHook.py @@ -1,32 +1,26 @@ import sys import logging -import signal -import importlib import gevent import gevent.hub from Config import config -from . import Debug last_error = None -def shutdown(reason="Unknown"): - logging.info("Shutting down (reason: %s)..." % reason) - import main - if "file_server" in dir(main): - try: - gevent.spawn(main.file_server.stop) - if "ui_server" in dir(main): - gevent.spawn(main.ui_server.stop) - except Exception as err: - print("Proper shutdown error: %s" % err) - sys.exit(0) - else: +def shutdown(): + print "Shutting down..." 
+ try: + if "file_server" in dir(sys.modules["main"]): + gevent.spawn(sys.modules["main"].file_server.stop) + if "ui_server" in dir(sys.modules["main"]): + gevent.spawn(sys.modules["main"].ui_server.stop) + except Exception, err: + print "Proper shutdown error: %s" % err sys.exit(0) # Store last error, ignore notify, allow manual error logging -def handleError(*args, **kwargs): +def handleError(*args): global last_error if not args: # Manual called args = sys.exc_info() @@ -35,23 +29,21 @@ def handleError(*args, **kwargs): silent = False if args[0].__name__ != "Notify": last_error = args - if args[0].__name__ == "KeyboardInterrupt": - shutdown("Keyboard interrupt") - elif not silent and args[0].__name__ != "Notify": + shutdown() + return + if not silent and args[0].__name__ != "Notify": logging.exception("Unhandled exception") - if "greenlet.py" not in args[2].tb_frame.f_code.co_filename: # Don't display error twice - sys.__excepthook__(*args, **kwargs) + sys.__excepthook__(*args) # Ignore notify errors -def handleErrorNotify(*args, **kwargs): - err = args[0] - if err.__name__ == "KeyboardInterrupt": - shutdown("Keyboard interrupt") - elif err.__name__ != "Notify": - logging.error("Unhandled exception: %s" % Debug.formatException(args)) - sys.__excepthook__(*args, **kwargs) +def handleErrorNotify(*args): + if args[0].__name__ == "KeyboardInterrupt": + shutdown() + if args[0].__name__ != "Notify": + logging.exception("Unhandled exception") + sys.__excepthook__(*args) if config.debug: # Keep last error for /Debug @@ -71,45 +63,34 @@ else: sys.excepthook(exc_info[0], exc_info[1], exc_info[2]) gevent.Greenlet = gevent.greenlet.Greenlet = ErrorhookedGreenlet - importlib.reload(gevent) - -def handleGreenletError(context, type, value, tb): - if context.__class__ is tuple and context[0].__class__.__name__ == "ThreadPool": - # Exceptions in ThreadPool will be handled in the main Thread - return None + reload(gevent) +def handleGreenletError(self, context, type, value, tb): if isinstance(value, str): # Cython can raise errors where the value is a plain string # e.g., AttributeError, "_semaphore.Semaphore has no attr", value = type(value) - - if not issubclass(type, gevent.get_hub().NOT_ERROR): + if not issubclass(type, self.NOT_ERROR): sys.excepthook(type, value, tb) -gevent.get_hub().handle_error = handleGreenletError - -try: - signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM")) -except Exception as err: - logging.debug("Error setting up SIGTERM watcher: %s" % err) - +gevent.hub.Hub.handle_error = handleGreenletError if __name__ == "__main__": import time from gevent import monkey monkey.patch_all(thread=False, ssl=False) - from . import Debug + import Debug def sleeper(num): - print("started", num) + print "started", num time.sleep(3) raise Exception("Error") - print("stopped", num) + print "stopped", num thread1 = gevent.spawn(sleeper, 1) thread2 = gevent.spawn(sleeper, 2) time.sleep(1) - print("killing...") + print "killing..." 
thread1.kill(exception=Debug.Notify("Worker stopped")) #thread2.throw(Debug.Notify("Throw")) - print("killed") - gevent.joinall([thread1,thread2]) + print "killed" + gevent.joinall([thread1,thread2]) \ No newline at end of file diff --git a/src/Debug/DebugLock.py b/src/Debug/DebugLock.py deleted file mode 100644 index 9cf22520..00000000 --- a/src/Debug/DebugLock.py +++ /dev/null @@ -1,24 +0,0 @@ -import time -import logging - -import gevent.lock - -from Debug import Debug - - -class DebugLock: - def __init__(self, log_after=0.01, name="Lock"): - self.name = name - self.log_after = log_after - self.lock = gevent.lock.Semaphore(1) - self.release = self.lock.release - - def acquire(self, *args, **kwargs): - s = time.time() - res = self.lock.acquire(*args, **kwargs) - time_taken = time.time() - s - if time_taken >= self.log_after: - logging.debug("%s: Waited %.3fs after called by %s" % - (self.name, time_taken, Debug.formatStack()) - ) - return res diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py index a892dc56..497d06cb 100644 --- a/src/Debug/DebugMedia.py +++ b/src/Debug/DebugMedia.py @@ -3,7 +3,6 @@ import subprocess import re import logging import time -import functools from Config import config from util import helper @@ -19,9 +18,9 @@ def findfiles(path, find_ext): elif f2 == "": return -1 else: - return helper.cmp(f1.lower(), f2.lower()) + return cmp(f1, f2) - for root, dirs, files in sorted(os.walk(path, topdown=False), key=functools.cmp_to_key(sorter)): + for root, dirs, files in sorted(os.walk(path, topdown=False), cmp=sorter): for file in sorted(files): file_path = root + "/" + file file_ext = file.split(".")[-1] @@ -45,7 +44,6 @@ def findCoffeescriptCompiler(): # Generates: all.js: merge *.js, compile coffeescript, all.css: merge *.css, vendor prefix features def merge(merged_path): - merged_path = merged_path.replace("\\", "/") merge_dir = os.path.dirname(merged_path) s = time.time() ext = merged_path.split(".")[-1] @@ -67,21 +65,19 @@ def merge(merged_path): if not changed: return # Assets not changed, nothing to do - old_parts = {} if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile - merged_old = open(merged_path, "rb").read() - for match in re.findall(rb"(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): - old_parts[match[1].decode()] = match[2].strip(b"\n\r") + merged_old = open(merged_path, "rb").read().decode("utf8") + old_parts = {} + for match in re.findall("(/\* ---- (.*?) 
---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): + old_parts[match[1]] = match[2].strip("\n\r") - logging.debug("Merging %s (changed: %s, old parts: %s)" % (merged_path, changed, len(old_parts))) # Merge files parts = [] s_total = time.time() for file_path in findfiles(merge_dir, find_ext): - file_relative_path = file_path.replace(merge_dir + "/", "") - parts.append(b"\n/* ---- %s ---- */\n\n" % file_relative_path.encode("utf8")) + parts.append("\n\n/* ---- %s ---- */\n\n" % file_path.replace(config.data_dir, "")) if file_path.endswith(".coffee"): # Compile coffee script - if file_path in changed or file_relative_path not in old_parts: # Only recompile if changed or its not compiled before + if file_path in changed or file_path.replace(config.data_dir, "") not in old_parts: # Only recompile if changed or its not compiled before if config.coffeescript_compiler is None: config.coffeescript_compiler = findCoffeescriptCompiler() if not config.coffeescript_compiler: @@ -89,42 +85,41 @@ def merge(merged_path): return False # No coffeescript compiler, skip this file # Replace / with os separators and escape it - file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep)) + file_path_escaped = helper.shellquote(os.path.join(*file_path.split("/"))) if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file - command = config.coffeescript_compiler.replace("%s", file_path_escaped) + command = config.coffeescript_compiler % file_path_escaped else: # Put coffeescript file to end command = config.coffeescript_compiler + " " + file_path_escaped # Start compiling s = time.time() compiler = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE) - out = compiler.stdout.read() + out = compiler.stdout.read().decode("utf8") compiler.wait() logging.debug("Running: %s (Done in %.2fs)" % (command, time.time() - s)) # Check errors - if out and out.startswith(b"("): # No error found + if out and out.startswith("("): # No error found parts.append(out) else: # Put error message in place of source code error = out - logging.error("%s Compile error: %s" % (file_relative_path, error)) - error_escaped = re.escape(error).replace(b"\n", b"\\n").replace(br"\\n", br"\n") + logging.error("%s Compile error: %s" % (file_path, error)) parts.append( - b"alert('%s compile error: %s');" % - (file_relative_path.encode(), error_escaped) + "alert('%s compile error: %s');" % + (file_path, re.escape(error).replace("\n", "\\n").replace(r"\\n", r"\n")) ) else: # Not changed use the old_part - parts.append(old_parts[file_relative_path]) + parts.append(old_parts[file_path.replace(config.data_dir, "")]) else: # Add to parts - parts.append(open(file_path, "rb").read()) + parts.append(open(file_path).read().decode("utf8")) - merged = b"\n".join(parts) + merged = u"\n".join(parts) if ext == "css": # Vendor prefix css from lib.cssvendor import cssvendor merged = cssvendor.prefix(merged) - merged = merged.replace(b"\r", b"") - open(merged_path, "wb").write(merged) + merged = merged.replace("\r", "") + open(merged_path, "wb").write(merged.encode("utf8")) logging.debug("Merged %s (%.2fs)" % (merged_path, time.time() - s_total)) diff --git a/src/Debug/DebugReloader.py b/src/Debug/DebugReloader.py index 482c7921..babcdca5 100644 --- a/src/Debug/DebugReloader.py +++ b/src/Debug/DebugReloader.py @@ -1,69 +1,49 @@ import logging import time -import os +import threading from Config import config -if config.debug and config.action == "main": +if config.debug: # Only load pyfilesytem 
if using debug mode try: - import watchdog - import watchdog.observers - import watchdog.events - logging.debug("Watchdog fs listener detected, source code autoreload enabled") - enabled = True - except Exception as err: - logging.debug("Watchdog fs listener could not be loaded: %s" % err) - enabled = False + from fs.osfs import OSFS + pyfilesystem = OSFS("src") + pyfilesystem_plugins = OSFS("plugins") + logging.debug("Pyfilesystem detected, source code autoreload enabled") + except Exception, err: + pyfilesystem = False else: - enabled = False + pyfilesystem = False class DebugReloader: - def __init__(self, paths=None): - if not paths: - paths = ["src", "plugins", config.data_dir + "/__plugins__"] - self.log = logging.getLogger("DebugReloader") + + def __init__(self, callback, directory="/"): self.last_chaged = 0 - self.callbacks = [] - if enabled: - self.observer = watchdog.observers.Observer() - event_handler = watchdog.events.FileSystemEventHandler() - event_handler.on_modified = event_handler.on_deleted = self.onChanged - event_handler.on_created = event_handler.on_moved = self.onChanged - for path in paths: - if not os.path.isdir(path): - continue - self.log.debug("Adding autoreload: %s" % path) - self.observer.schedule(event_handler, path, recursive=True) - self.observer.start() + if pyfilesystem: + self.directory = directory + self.callback = callback + logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback)) + thread = threading.Thread(target=self.addWatcher) + thread.daemon = True + thread.start() - def addCallback(self, f): - self.callbacks.append(f) - - def onChanged(self, evt): - path = evt.src_path - ext = path.rsplit(".", 1)[-1] - if ext not in ["py", "json"] or "Test" in path or time.time() - self.last_chaged < 1.0: - return False - self.last_chaged = time.time() - if os.path.isfile(path): - time_modified = os.path.getmtime(path) - else: - time_modified = 0 - self.log.debug("File changed: %s reloading source code (modified %.3fs ago)" % (evt, time.time() - time_modified)) - if time.time() - time_modified > 5: # Probably it's just an attribute change, ignore it - return False + def addWatcher(self, recursive=True): + try: + time.sleep(1) # Wait for .pyc compiles + pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive) + pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive) + except Exception, err: + print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err + def changed(self, evt): + if ( + not evt.path or "%s/" % config.data_dir in evt.path or + (not evt.path.endswith("py") and not evt.path.endswith("json")) or + "Test" in evt.path or + time.time() - self.last_chaged < 1 + ): + return False # Ignore *.pyc changes and no reload within 1 sec time.sleep(0.1) # Wait for lock release - for callback in self.callbacks: - try: - callback() - except Exception as err: - self.log.exception(err) - - def stop(self): - if enabled: - self.observer.stop() - self.log.debug("Stopped autoreload observer") - -watcher = DebugReloader() + self.callback() + self.last_chaged = time.time() diff --git a/src/Debug/__init__.py b/src/Debug/__init__.py index e69de29b..8632f92f 100644 --- a/src/Debug/__init__.py +++ b/src/Debug/__init__.py @@ -0,0 +1 @@ +from DebugReloader import DebugReloader \ No newline at end of file diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index c082c378..1041a8ad 100644 --- a/src/File/FileRequest.py +++ 
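The watchdog-based DebugReloader on the removed side of this hunk is worth restating whole: one Observer, one FileSystemEventHandler with its event callbacks rebound to a single method, a filter to .py/.json files, and a one-second debounce. A compact sketch of that arrangement (SourceReloader is our name; paths and callback come from the caller):

    import os
    import time
    import watchdog.events
    import watchdog.observers

    class SourceReloader:
        def __init__(self, paths, callback):
            self.last_changed = 0
            self.callback = callback
            self.observer = watchdog.observers.Observer()
            handler = watchdog.events.FileSystemEventHandler()
            # Rebind all interesting events to one method, as the diff does
            handler.on_modified = handler.on_created = handler.on_moved = self.onChanged
            for path in paths:
                if os.path.isdir(path):
                    self.observer.schedule(handler, path, recursive=True)
            self.observer.start()

        def onChanged(self, evt):
            # Only source/config files, and at most one reload per second
            if not evt.src_path.endswith((".py", ".json")):
                return
            if time.time() - self.last_changed < 1.0:
                return
            self.last_changed = time.time()
            self.callback()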
b/src/File/FileRequest.py @@ -2,7 +2,6 @@ import os import time import json -import collections import itertools # Third party modules @@ -11,18 +10,13 @@ import gevent from Debug import Debug from Config import config from util import RateLimit -from util import Msgpack +from util import StreamingMsgpack from util import helper from Plugin import PluginManager -from contextlib import closing FILE_BUFF = 1024 * 512 -class RequestError(Exception): - pass - - # Incoming requests @PluginManager.acceptPlugins class FileRequest(object): @@ -63,10 +57,10 @@ class FileRequest(object): # Don't allow other sites than locked if "site" in params and self.connection.target_onion: valid_sites = self.connection.getValidSites() - if params["site"] not in valid_sites and valid_sites != ["global"]: + if params["site"] not in valid_sites: self.response({"error": "Invalid site"}) self.connection.log( - "Site lock violation: %s not in %s, target onion: %s" % + "%s site lock violation: %s not in %s, target onion: %s" % (params["site"], valid_sites, self.connection.target_onion) ) self.connection.badAction(5) @@ -74,12 +68,16 @@ class FileRequest(object): if cmd == "update": event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"]) + if not RateLimit.isAllowed(event): # There was already an update for this file in the last 10 second + time.sleep(5) + self.response({"ok": "File update queued"}) # If called more than once within 15 sec only keep the last update RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params) else: func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions + s = time.time() if self.connection.cpu_time > 0.5: self.log.debug( "Delay %s %s, cpu_time used by connection: %.3fs" % @@ -88,7 +86,6 @@ class FileRequest(object): time.sleep(self.connection.cpu_time) if self.connection.cpu_time > 5: self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time) - s = time.time() if func: func(params) else: @@ -96,100 +93,74 @@ class FileRequest(object): if cmd not in ["getFile", "streamFile"]: taken = time.time() - s - taken_sent = self.connection.last_sent_time - self.connection.last_send_time - self.connection.cpu_time += taken - taken_sent + self.connection.cpu_time += taken # Update a site file request def actionUpdate(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(1) - self.connection.badAction(5) return False - inner_path = params.get("inner_path", "") - if not inner_path.endswith("content.json"): + if not params["inner_path"].endswith("content.json"): self.response({"error": "Only content.json update allowed"}) self.connection.badAction(5) return - current_content_modified = site.content_manager.contents.get(inner_path, {}).get("modified", 0) - should_validate_content = True - if "modified" in params and params["modified"] <= current_content_modified: - should_validate_content = False - valid = None # Same or earlier content as we have - - body = params["body"] - if not body: # No body sent, we have to download it first - site.log.debug("Missing body from update for file %s, downloading ..." 
% inner_path) - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer - try: - body = peer.getFile(site.address, inner_path).read() - except Exception as err: - site.log.debug("Can't download updated file %s: %s" % (inner_path, err)) - self.response({"error": "Invalid File update: Failed to download updated file content"}) - self.connection.badAction(5) - return + try: + content = json.loads(params["body"]) + except Exception, err: + self.log.debug("Update for %s is invalid JSON: %s" % (params["inner_path"], err)) + self.response({"error": "File invalid JSON"}) + self.connection.badAction(5) + return - if should_validate_content: - try: - if type(body) is str: - body = body.encode() - # elif type(body) is list: - # content = json.loads(bytes(list).decode()) - content = json.loads(body.decode()) - except Exception as err: - site.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) - self.response({"error": "File invalid JSON"}) - self.connection.badAction(5) - return + file_uri = "%s/%s:%s" % (site.address, params["inner_path"], content["modified"]) - file_uri = "%s/%s:%s" % (site.address, inner_path, content["modified"]) - - if self.server.files_parsing.get(file_uri): # Check if we already working on it - valid = None # Same file - else: - try: - valid = site.content_manager.verifyFile(inner_path, content) - except Exception as err: - site.log.debug("Update for %s is invalid: %s" % (inner_path, err)) - error = err - valid = False + if self.server.files_parsing.get(file_uri): # Check if we already working on it + valid = None # Same file + else: + valid = site.content_manager.verifyFile(params["inner_path"], content) if valid is True: # Valid and changed - site.log.info("Update for %s looks valid, saving..." % inner_path) + self.log.info("Update for %s/%s looks valid, saving..." % (params["site"], params["inner_path"])) self.server.files_parsing[file_uri] = True - site.storage.write(inner_path, body) + site.storage.write(params["inner_path"], params["body"]) del params["body"] - site.onFileDone(inner_path) # Trigger filedone + site.onFileDone(params["inner_path"]) # Trigger filedone - # Download every changed file from peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer - # On complete publish to other peers - diffs = params.get("diffs", {}) - site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=6), "publish_%s" % inner_path) + if params["inner_path"].endswith("content.json"): # Download every changed file from peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer + # On complete publish to other peers + diffs = params.get("diffs", {}) + site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"], diffs=diffs, limit=2), "publish_%s" % params["inner_path"]) - # Load new content file and download changed files in new thread - def downloader(): - site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {})) + # Load new content file and download changed files in new thread + def downloader(): + site.downloadContent(params["inner_path"], peer=peer, diffs=params.get("diffs", {})) + del self.server.files_parsing[file_uri] + + gevent.spawn(downloader) + else: del self.server.files_parsing[file_uri] - gevent.spawn(downloader) - - self.response({"ok": "Thanks, file %s updated!" % inner_path}) + self.response({"ok": "Thanks, file %s updated!" 
% params["inner_path"]}) self.connection.goodAction() elif valid is None: # Not changed - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update old") # Add or get peer + if params.get("peer"): + peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer + else: + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer if peer: if not peer.connection: peer.connect(self.connection) # Assign current connection to peer - if inner_path in site.content_manager.contents: - peer.last_content_json_update = site.content_manager.contents[inner_path]["modified"] + if params["inner_path"] in site.content_manager.contents: + peer.last_content_json_update = site.content_manager.contents[params["inner_path"]]["modified"] if config.verbose: - site.log.debug( + self.log.debug( "Same version, adding new peer for locked files: %s, tasks: %s" % (peer.key, len(site.worker_manager.tasks)) ) @@ -202,155 +173,155 @@ class FileRequest(object): self.connection.badAction() else: # Invalid sign or sha hash - self.response({"error": "File %s invalid: %s" % (inner_path, error)}) + self.log.debug("Update for %s is invalid" % params["inner_path"]) + self.response({"error": "File invalid"}) self.connection.badAction(5) - def isReadable(self, site, inner_path, file, pos): - return True - # Send file content request - def handleGetFile(self, params, streaming=False): + def actionGetFile(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False try: file_path = site.storage.getPath(params["inner_path"]) - if streaming: - file_obj = site.storage.open(params["inner_path"]) - else: - file_obj = Msgpack.FilePart(file_path, "rb") - - with file_obj as file: + with StreamingMsgpack.FilePart(file_path, "rb") as file: file.seek(params["location"]) - read_bytes = params.get("read_bytes", FILE_BUFF) + file.read_bytes = FILE_BUFF file_size = os.fstat(file.fileno()).st_size - - if file_size > read_bytes: # Check if file is readable at current position (for big files) - if not self.isReadable(site, params["inner_path"], file, params["location"]): - raise RequestError("File not readable at position: %s" % params["location"]) - else: - if params.get("file_size") and params["file_size"] != file_size: - self.connection.badAction(2) - raise RequestError("File size does not match: %sB != %sB" % (params["file_size"], file_size)) - - if not streaming: - file.read_bytes = read_bytes - if params["location"] > file_size: self.connection.badAction(5) - raise RequestError("Bad file location") + raise Exception("Bad file location") - if streaming: - back = { - "size": file_size, - "location": min(file.tell() + read_bytes, file_size), - "stream_bytes": min(read_bytes, file_size - params["location"]) - } - self.response(back) - self.sendRawfile(file, read_bytes=read_bytes) - else: - back = { - "body": file, - "size": file_size, - "location": min(file.tell() + file.read_bytes, file_size) - } - self.response(back, streaming=True) + back = { + "body": file, + "size": file_size, + "location": min(file.tell() + FILE_BUFF, file_size) + } + self.response(back, streaming=True) - bytes_sent = min(read_bytes, file_size - params["location"]) # Number of bytes we going to send + bytes_sent = min(FILE_BUFF, file_size - params["location"]) # Number of 
bytes we going to send site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + bytes_sent if config.debug_socket: self.log.debug("File %s at position %s sent %s bytes" % (file_path, params["location"], bytes_sent)) # Add peer to site if not added before - connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") + connected_peer = site.addPeer(self.connection.ip, self.connection.port) if connected_peer: # Just added connected_peer.connect(self.connection) # Assign current connection to peer return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]} - except RequestError as err: - self.log.debug("GetFile %s %s %s request error: %s" % (self.connection, params["site"], params["inner_path"], Debug.formatException(err))) - self.response({"error": "File read error: %s" % err}) - except OSError as err: - if config.verbose: - self.log.debug("GetFile read error: %s" % Debug.formatException(err)) + except Exception, err: + self.log.debug("GetFile read error: %s" % Debug.formatException(err)) self.response({"error": "File read error"}) return False - except Exception as err: - self.log.error("GetFile exception: %s" % Debug.formatException(err)) - self.response({"error": "File read exception"}) - return False - - def actionGetFile(self, params): - return self.handleGetFile(params) + # New-style file streaming out of Msgpack context def actionStreamFile(self, params): - return self.handleGetFile(params, streaming=True) + site = self.sites.get(params["site"]) + if not site or not site.settings["serving"]: # Site unknown or not serving + self.response({"error": "Unknown site"}) + return False + try: + if config.debug_socket: + self.log.debug("Opening file: %s" % params["inner_path"]) + with site.storage.open(params["inner_path"]) as file: + file.seek(params["location"]) + file_size = os.fstat(file.fileno()).st_size + stream_bytes = min(FILE_BUFF, file_size - params["location"]) + if stream_bytes < 0: + self.connection.badAction(5) + raise Exception("Bad file location") + + back = { + "size": file_size, + "location": min(file.tell() + FILE_BUFF, file_size), + "stream_bytes": stream_bytes + } + if config.debug_socket: + self.log.debug( + "Sending file %s from position %s to %s" % + (params["inner_path"], params["location"], back["location"]) + ) + self.response(back) + self.sendRawfile(file, read_bytes=FILE_BUFF) + + site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + stream_bytes + if config.debug_socket: + self.log.debug("File %s at position %s sent %s bytes" % (params["inner_path"], params["location"], stream_bytes)) + + # Add peer to site if not added before + connected_peer = site.addPeer(self.connection.ip, self.connection.port) + if connected_peer: # Just added + connected_peer.connect(self.connection) # Assign current connection to peer + + return {"bytes_sent": stream_bytes, "file_size": file_size, "location": params["location"]} + + except Exception, err: + self.log.debug("GetFile read error: %s" % Debug.formatException(err)) + self.response({"error": "File read error"}) + return False # Peer exchange request def actionPex(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False got_peer_keys = [] added = 0 # Add requester peer to site - connected_peer = site.addPeer(self.connection.ip, 
self.connection.port, source="request") - + connected_peer = site.addPeer(self.connection.ip, self.connection.port) if connected_peer: # It was not registered before added += 1 connected_peer.connect(self.connection) # Assign current connection to peer # Add sent peers to site - for packed_address in itertools.chain(params.get("peers", []), params.get("peers_ipv6", [])): + for packed_address in params.get("peers", []): address = helper.unpackAddress(packed_address) got_peer_keys.append("%s:%s" % address) - if site.addPeer(*address, source="pex"): + if site.addPeer(*address): added += 1 - # Add sent onion peers to site + # Add sent peers to site for packed_address in params.get("peers_onion", []): address = helper.unpackOnionAddress(packed_address) got_peer_keys.append("%s:%s" % address) - if site.addPeer(*address, source="pex"): + if site.addPeer(*address): added += 1 # Send back peers that is not in the sent list and connectable (not port 0) - packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], ignore=got_peer_keys, allow_private=False)) + packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], got_peer_keys)) if added: site.worker_manager.onPeers() if config.verbose: self.log.debug( "Added %s peers to %s using pex, sending back %s" % - (added, site, {key: len(val) for key, val in packed_peers.items()}) + (added, site, len(packed_peers["ip4"]) + len(packed_peers["onion"])) ) - back = { - "peers": packed_peers["ipv4"], - "peers_ipv6": packed_peers["ipv6"], - "peers_onion": packed_peers["onion"] - } + back = {} + if packed_peers["ip4"]: + back["peers"] = packed_peers["ip4"] + if packed_peers["onion"]: + back["peers_onion"] = packed_peers["onion"] self.response(back) # Get modified content.json files since def actionListModified(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False modified_files = site.content_manager.listModified(params["since"]) # Add peer to site if not added before - connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") + connected_peer = site.addPeer(self.connection.ip, self.connection.port) if connected_peer: # Just added connected_peer.connect(self.connection) # Assign current connection to peer @@ -358,35 +329,41 @@ class FileRequest(object): def actionGetHashfield(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) - self.connection.badAction(5) return False # Add peer to site if not added before - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="request") + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) if not peer.connection: # Just added peer.connect(self.connection) # Assign current connection to peer peer.time_my_hashfield_sent = time.time() # Don't send again if not changed - self.response({"hashfield_raw": site.content_manager.hashfield.tobytes()}) + self.response({"hashfield_raw": site.content_manager.hashfield.tostring()}) def findHashIds(self, site, hash_ids, limit=100): - back = collections.defaultdict(lambda: collections.defaultdict(list)) + back_ip4 = {} + back_onion = {} found = 
site.worker_manager.findOptionalHashIds(hash_ids, limit=limit) - for hash_id, peers in found.items(): - for peer in peers: - ip_type = helper.getIpType(peer.ip) - if len(back[ip_type][hash_id]) < 20: - back[ip_type][hash_id].append(peer.packMyAddress()) - return back + for hash_id, peers in found.iteritems(): + back_onion[hash_id] = list(itertools.islice(( + helper.packOnionAddress(peer.ip, peer.port) + for peer in peers + if peer.ip.endswith("onion") + ), 50)) + back_ip4[hash_id] = list(itertools.islice(( + helper.packAddress(peer.ip, peer.port) + for peer in peers + if not peer.ip.endswith("onion") + ), 50)) + return back_ip4, back_onion def actionFindHashIds(self, params): site = self.sites.get(params["site"]) s = time.time() - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(5) return False @@ -394,55 +371,73 @@ class FileRequest(object): event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"])) if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5): time.sleep(0.1) - back = self.findHashIds(site, params["hash_ids"], limit=10) + back_ip4, back_onion = self.findHashIds(site, params["hash_ids"], limit=10) else: - back = self.findHashIds(site, params["hash_ids"]) + back_ip4, back_onion = self.findHashIds(site, params["hash_ids"]) RateLimit.called(event_key) - my_hashes = [] + # Check my hashfield + if self.server.tor_manager and self.server.tor_manager.site_onions.get(site.address): # Running onion + my_ip = helper.packOnionAddress(self.server.tor_manager.site_onions[site.address], self.server.port) + my_back = back_onion + elif config.ip_external: # External ip defined + my_ip = helper.packAddress(config.ip_external, self.server.port) + my_back = back_ip4 + else: # No external ip defined + my_ip = my_ip = helper.packAddress(self.server.ip, self.server.port) + my_back = back_ip4 + my_hashfield_set = set(site.content_manager.hashfield) for hash_id in params["hash_ids"]: if hash_id in my_hashfield_set: - my_hashes.append(hash_id) + if hash_id not in my_back: + my_back[hash_id] = [] + my_back[hash_id].append(my_ip) # Add myself if config.verbose: self.log.debug( - "Found: %s for %s hashids in %.3fs" % - ({key: len(val) for key, val in back.items()}, len(params["hash_ids"]), time.time() - s) + "Found: IP4: %s, Onion: %s for %s hashids in %.3fs" % + (len(back_ip4), len(back_onion), len(params["hash_ids"]), time.time() - s) ) - self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes}) + self.response({"peers": back_ip4, "peers_onion": back_onion}) def actionSetHashfield(self, params): site = self.sites.get(params["site"]) - if not site or not site.isServing(): # Site unknown or not serving + if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) self.connection.badAction(5) return False # Add or get peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection, source="request") + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection) if not peer.connection: peer.connect(self.connection) - peer.hashfield.replaceFromBytes(params["hashfield_raw"]) + peer.hashfield.replaceFromString(params["hashfield_raw"]) self.response({"ok": "Updated"}) + def 
actionSiteReload(self, params): + if self.connection.ip not in config.ip_local and self.connection.ip != config.ip_external: + self.response({"error": "Only local host allowed"}) + + site = self.sites.get(params["site"]) + site.content_manager.loadContent(params["inner_path"], add_bad_files=False) + site.storage.verifyFiles(quick_check=True) + site.updateWebsocket() + + self.response({"ok": "Reloaded"}) + + def actionSitePublish(self, params): + if self.connection.ip not in config.ip_local and self.connection.ip != config.ip_external: + self.response({"error": "Only local host allowed"}) + + site = self.sites.get(params["site"]) + num = site.publish(limit=8, inner_path=params.get("inner_path", "content.json"), diffs=params.get("diffs", {})) + + self.response({"ok": "Successfully published to %s peers" % num}) + # Send a simple Pong! answer def actionPing(self, params): - self.response(b"Pong!") - - # Check requested port of the other peer - def actionCheckport(self, params): - if helper.getIpType(self.connection.ip) == "ipv6": - sock_address = (self.connection.ip, params["port"], 0, 0) - else: - sock_address = (self.connection.ip, params["port"]) - - with closing(helper.createSocket(self.connection.ip)) as sock: - sock.settimeout(5) - if sock.connect_ex(sock_address) == 0: - self.response({"status": "open", "ip_external": self.connection.ip}) - else: - self.response({"status": "closed", "ip_external": self.connection.ip}) + self.response("Pong!") # Unknown command def actionUnknown(self, cmd, params):
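
A note on the command dispatch used by route() above: incoming command names are resolved to "action"-prefixed methods with getattr(), and anything unresolved falls through to actionUnknown(). A minimal, self-contained Python 3 sketch of that pattern (the Dispatcher class and its sample methods are illustrative, not part of the source):

    class Dispatcher(object):
        def route(self, cmd, params):
            # "ping" resolves to actionPing, "getFile" to actionGetFile, etc.
            func_name = "action" + cmd[0].upper() + cmd[1:]
            func = getattr(self, func_name, None)
            if func:
                return func(params)
            else:
                return self.actionUnknown(cmd, params)

        def actionPing(self, params):
            return "Pong!"

        def actionUnknown(self, cmd, params):
            return {"error": "Unknown command: %s" % cmd}

    # Dispatcher().route("ping", {}) -> "Pong!"; unknown commands return an error dict.

diff --git a/src/File/FileServer.py b/src/File/FileServer.py index b7a942fc..9e560224 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -1,135 +1,33 @@ import logging +import urllib2 +import re import time -import random import socket -import sys import gevent -import gevent.pool -from gevent.server import StreamServer import util -from util import helper from Config import config -from .FileRequest import FileRequest -from Peer import PeerPortchecker +from FileRequest import FileRequest from Site import SiteManager -from Connection import ConnectionServer -from Plugin import PluginManager from Debug import Debug +from Connection import ConnectionServer +from util import UpnpPunch -@PluginManager.acceptPlugins class FileServer(ConnectionServer): - def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port, ip_type=config.fileserver_ip_type): - self.site_manager = SiteManager.site_manager - self.portchecker = PeerPortchecker.PeerPortchecker(self) - self.log = logging.getLogger("FileServer") - self.ip_type = ip_type - self.ip_external_list = [] - - self.supported_ip_types = ["ipv4"] # Outgoing ip_type support - if helper.getIpType(ip) == "ipv6" or self.isIpv6Supported(): - self.supported_ip_types.append("ipv6") - - if ip_type == "ipv6" or (ip_type == "dual" and "ipv6" in self.supported_ip_types): - ip = ip.replace("*", "::") - else: - ip = ip.replace("*", "0.0.0.0") - - if config.tor == "always": - port = config.tor_hs_port - config.fileserver_port = port - elif port == 0: # Use random port - port_range_from, port_range_to = list(map(int, config.fileserver_port_range.split("-"))) - port = self.getRandomPort(ip, port_range_from, port_range_to) - config.fileserver_port = port - if not port: - raise Exception("Can't find bindable port") - if not config.tor == "always": - config.saveValue("fileserver_port", port) # Save random port value for next restart - config.arguments.fileserver_port = port - + def __init__(self, ip=config.fileserver_ip,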
port=config.fileserver_port): ConnectionServer.__init__(self, ip, port, self.handleRequest) - self.log.debug("Supported IP types: %s" % self.supported_ip_types) - - if ip_type == "dual" and ip == "::": - # Also bind to ipv4 addres in dual mode - try: - self.log.debug("Binding proxy to %s:%s" % ("::", self.port)) - self.stream_server_proxy = StreamServer( - ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 - ) - except Exception as err: - self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err)) - - self.port_opened = {} - - self.sites = self.site_manager.sites + if config.ip_external: # Ip external defined in arguments + self.port_opened = True + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + self.port_opened = None # Is file server opened on router + self.upnp_port_opened = False + self.sites = {} self.last_request = time.time() self.files_parsing = {} - self.ui_server = None - - def getRandomPort(self, ip, port_range_from, port_range_to): - """Generates Random Port from given range - Args: - ip: IP Address - port_range_from: From Range - port_range_to: to Range - """ - self.log.info("Getting random port in range %s-%s..." % (port_range_from, port_range_to)) - tried = [] - for bind_retry in range(100): - port = random.randint(port_range_from, port_range_to) - if port in tried: - continue - tried.append(port) - sock = helper.createSocket(ip) - try: - sock.bind((ip, port)) - success = True - except Exception as err: - self.log.warning("Error binding to port %s: %s" % (port, err)) - success = False - sock.close() - if success: - self.log.info("Found unused random port: %s" % port) - return port - else: - time.sleep(0.1) - return False - - def isIpv6Supported(self): - if config.tor == "always": - return True - # Test if we can connect to ipv6 address - ipv6_testip = "fcec:ae97:8902:d810:6c92:ec67:efb2:3ec5" - try: - sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) - sock.connect((ipv6_testip, 80)) - local_ipv6 = sock.getsockname()[0] - if local_ipv6 == "::1": - self.log.debug("IPv6 not supported, no local IPv6 address") - return False - else: - self.log.debug("IPv6 supported on IP %s" % local_ipv6) - return True - except socket.error as err: - self.log.warning("IPv6 not supported: %s" % err) - return False - except Exception as err: - self.log.error("IPv6 check error: %s" % err) - return False - - def listenProxy(self): - try: - self.stream_server_proxy.serve_forever() - except Exception as err: - if err.errno == 98: # Address already in use error - self.log.debug("StreamServer proxy listen error: %s" % err) - else: - self.log.info("StreamServer proxy listen error: %s" % err) # Handle request to fileserver def handleRequest(self, connection, message): @@ -143,7 +41,7 @@ class FileServer(ConnectionServer): self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"])) req = FileRequest(self, connection) req.route(message["cmd"], message.get("req_id"), message.get("params")) - if not self.has_internet and not connection.is_private_ip: + if not self.has_internet: self.has_internet = True self.onInternetOnline() @@ -157,156 +55,183 @@ class FileServer(ConnectionServer): import imp FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest - def portCheck(self): - if config.offline: - self.log.info("Offline mode: port check disabled") - res = {"ipv4": None, "ipv6": None} - self.port_opened = res - return res + # Try to open the port using 
upnp + def openport(self, port=None, check=True): + if not port: + port = self.port + if self.port_opened: + return True # Port already opened + if check: # Check first if its already opened + time.sleep(1) # Wait for port open + if self.testOpenport(port, use_alternative=False)["result"] is True: + return True # Port already opened - if config.ip_external: - for ip_external in config.ip_external: - SiteManager.peer_blacklist.append((ip_external, self.port)) # Add myself to peer blacklist + if config.tor == "always": # Port opening won't work in Tor mode + return False - ip_external_types = set([helper.getIpType(ip) for ip in config.ip_external]) - res = { - "ipv4": "ipv4" in ip_external_types, - "ipv6": "ipv6" in ip_external_types - } - self.ip_external_list = config.ip_external - self.port_opened.update(res) - self.log.info("Server port opened based on configuration ipv4: %s, ipv6: %s" % (res["ipv4"], res["ipv6"])) - return res + self.log.info("Trying to open port using UpnpPunch...") + try: + UpnpPunch.ask_to_open_port(self.port, 'ZeroNet', retries=3, protos=["TCP"]) + except (UpnpPunch.UpnpError, UpnpPunch.IGDError, socket.error) as err: + self.log.error("UpnpPunch run error: %s" % + Debug.formatException(err)) + return False - self.port_opened = {} - if self.ui_server: - self.ui_server.updateWebsocket() + if self.testOpenport(port)["result"] is True: + self.upnp_port_opened = True + return True - if "ipv6" in self.supported_ip_types: - res_ipv6_thread = gevent.spawn(self.portchecker.portCheck, self.port, "ipv6") + self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port) + return False + + # Test if the port is open + def testOpenport(self, port=None, use_alternative=True): + if not port: + port = self.port + back = self.testOpenportPortchecker(port) + if back["result"] is not True and use_alternative: # If no success try alternative checker + return self.testOpenportCanyouseeme(port) else: - res_ipv6_thread = None + return back - res_ipv4 = self.portchecker.portCheck(self.port, "ipv4") - if not res_ipv4["opened"] and config.tor != "always": - if self.portchecker.portOpen(self.port): - res_ipv4 = self.portchecker.portCheck(self.port, "ipv4") + def testOpenportPortchecker(self, port=None): + self.log.info("Checking port %s using portchecker.co..." % port) + try: + data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read() + message = re.match('.*
<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags + except Exception, err: + message = "Error: %s" % Debug.formatException(err) + data = "" - if res_ipv6_thread is None: - res_ipv6 = {"ip": None, "opened": None} + if "closed" in message or "Error" in message: + if config.tor != "always": + self.log.info("[BAD :(] Port closed: %s" % message) + if port == self.port: + self.port_opened = False # Self port, update port_opened status + match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": False, "message": message} else: - res_ipv6 = res_ipv6_thread.get() - if res_ipv6["opened"] and not helper.getIpType(res_ipv6["ip"]) == "ipv6": - self.log.info("Invalid IPv6 address from port check: %s" % res_ipv6["ip"]) - res_ipv6["opened"] = False + self.log.info("[OK :)] Port open: %s" % message) + if port == self.port: # Self port, update port_opened status + self.port_opened = True + match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": True, "message": message} - self.ip_external_list = [] - for res_ip in [res_ipv4, res_ipv6]: - if res_ip["ip"] and res_ip["ip"] not in self.ip_external_list: - self.ip_external_list.append(res_ip["ip"]) - SiteManager.peer_blacklist.append((res_ip["ip"], self.port)) + def testOpenportCanyouseeme(self, port=None): + self.log.info("Checking port %s using canyouseeme.org..." % port) + try: + data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read() + message = re.match('.*<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags + except Exception, err: + message = "Error: %s" % Debug.formatException(err) - self.log.info("Server port opened ipv4: %s, ipv6: %s" % (res_ipv4["opened"], res_ipv6["opened"])) + if "Error" in message: + if config.tor != "always": + self.log.info("[BAD :(] Port closed: %s" % message) + if port == self.port: + self.port_opened = False # Self port, update port_opened status + match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": False, "message": message} + else: + self.log.info("[OK :)] Port open: %s" % message) + if port == self.port: # Self port, update port_opened status + self.port_opened = True + match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message + if match: # Found my ip in message + config.ip_external = match.group(1) + SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + else: + config.ip_external = False + return {"result": True, "message": message} - res = {"ipv4": res_ipv4["opened"], "ipv6": res_ipv6["opened"]} - - # Add external IPs from local interfaces - interface_ips = helper.getInterfaceIps("ipv4") - if "ipv6" in self.supported_ip_types: - interface_ips += helper.getInterfaceIps("ipv6") - for ip in interface_ips: - if not helper.isPrivateIp(ip) and ip not in self.ip_external_list: - self.ip_external_list.append(ip) - res[helper.getIpType(ip)] = True # We have opened port if we have external ip - SiteManager.peer_blacklist.append((ip, self.port)) - self.log.debug("External ip found on interfaces: %s" % ip) - - self.port_opened.update(res) - - if self.ui_server: - self.ui_server.updateWebsocket() - - return res
% ip_external) + config.ip_external = ip_external + self.port_opened = True # Check site file integrity def checkSite(self, site, check_files=False): - if site.isServing(): + if site.settings["serving"]: site.announce(mode="startup") # Announce site to tracker site.update(check_files=check_files) # Update site's content.json and download changed files site.sendMyHashfield() site.updateHashfield() + if len(site.peers) > 5: # Keep active connections if site having 5 or more peers + site.needConnections() # Check sites integrity @util.Noparallel() def checkSites(self, check_files=False, force_port_check=False): self.log.debug("Checking sites...") - s = time.time() sites_checking = False - if not self.port_opened or force_port_check: # Test and open port if not tested yet + if self.port_opened is None or force_port_check: # Test and open port if not tested yet if len(self.sites) <= 2: # Don't wait port opening on first startup sites_checking = True - for address, site in list(self.sites.items()): + for address, site in self.sites.items(): gevent.spawn(self.checkSite, site, check_files) - self.portCheck() - - if not self.port_opened["ipv4"]: + if force_port_check: + self.port_opened = None + self.openport() + if self.port_opened is False: self.tor_manager.startOnions() if not sites_checking: - check_pool = gevent.pool.Pool(5) - # Check sites integrity - for site in sorted(list(self.sites.values()), key=lambda site: site.settings.get("modified", 0), reverse=True): - if not site.isServing(): - continue - check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread + for site in sorted(self.sites.values(), key=lambda site: site.settings.get("modified", 0), reverse=True): # Check sites integrity + check_thread = gevent.spawn(self.checkSite, site, check_files) # Check in new thread time.sleep(2) if site.settings.get("modified", 0) < time.time() - 60 * 60 * 24: # Not so active site, wait some sec to finish - check_thread.join(timeout=5) - self.log.debug("Checksites done in %.3fs" % (time.time() - s)) + check_thread.join(timeout=10) def cleanupSites(self): import gc startup = True time.sleep(5 * 60) # Sites already cleaned up on startup - peers_protected = set([]) while 1: # Sites health care every 20 min - self.log.debug( - "Running site cleanup, connections: %s, internet: %s, protected peers: %s" % - (len(self.connections), self.has_internet, len(peers_protected)) - ) + self.log.debug("Running site cleanup, connections: %s, internet: %s" % (len(self.connections), self.has_internet)) - for address, site in list(self.sites.items()): - if not site.isServing(): + for address, site in self.sites.items(): + if not site.settings["serving"]: continue if not startup: - site.cleanupPeers(peers_protected) + site.cleanupPeers() time.sleep(1) # Prevent too quick request - peers_protected = set([]) - for address, site in list(self.sites.items()): - if not site.isServing(): + for address, site in self.sites.items(): + if not site.settings["serving"]: continue if site.peers: with gevent.Timeout(10, exception=False): - site.announcer.announcePex() + site.announcePex() - # Last check modification failed - if site.content_updated is False: - site.update() - elif site.bad_files: + # Retry failed files + if site.bad_files: site.retryBadFiles() - if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7: - # Keep active connections if site has been modified witin 7 days - connected_num = site.needConnections(check_site_on_reconnect=True) - - if connected_num < 
config.connected_limit: # This site has small amount of peers, protect them from closing - peers_protected.update([peer.key for peer in site.getConnectedPeers()]) + if not startup: # Don't do it at start up because checkSite already has needConnections at start up. + site.needConnections(check_site_on_reconnect=True) # Keep active peer connection to get the updates time.sleep(1) # Prevent too quick request @@ -315,76 +240,58 @@ class FileServer(ConnectionServer): startup = False time.sleep(60 * 20) - def announceSite(self, site): - site.announce(mode="update", pex=False) - active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60 - if site.settings["own"] or active_site: - # Check connections more frequently on own and active sites to speed-up first connections - site.needConnections(check_site_on_reconnect=True) - site.sendMyHashfield(3) - site.updateHashfield(3) + def trackersFileReloader(self): + while 1: + config.loadTrackersFile() + time.sleep(60) # Announce sites every 20 min def announceSites(self): + if config.trackers_file: + gevent.spawn(self.trackersFileReloader) + time.sleep(5 * 60) # Sites already announced on startup while 1: - config.loadTrackersFile() s = time.time() - for address, site in list(self.sites.items()): - if not site.isServing(): + for address, site in self.sites.items(): + if not site.settings["serving"]: continue - gevent.spawn(self.announceSite, site).join(timeout=10) + site.announce(mode="update", pex=False) + active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60 + if site.settings["own"] or active_site: # Check connections more frequently on own and active sites to speed-up first connections + site.needConnections(check_site_on_reconnect=True) + site.sendMyHashfield(3) + site.updateHashfield(3) time.sleep(1) taken = time.time() - s - # Query all trackers one-by-one in 20 minutes evenly distributed - sleep = max(0, 60 * 20 / len(config.trackers) - taken) - - self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." % (taken, sleep)) + sleep = max(0, 60 * 20 / len(config.trackers) - taken) # Query all trackers one-by-one in 20 minutes evenly distributed + self.log.debug("Site announce tracker done in %.3fs, sleeping for %ss..." % (taken, sleep)) time.sleep(sleep) # Detects if computer back from wakeup def wakeupWatcher(self): last_time = time.time() - last_my_ips = socket.gethostbyname_ex('')[2] while 1: time.sleep(30) - is_time_changed = time.time() - max(self.last_request, last_time) > 60 * 3 - if is_time_changed: + if time.time() - max(self.last_request, last_time) > 60 * 3: # If taken more than 3 minute then the computer was in sleep mode self.log.info( - "Wakeup detected: time warp from %0.f to %0.f (%0.f sleep seconds), acting like startup..." % + "Wakeup detected: time warp from %s to %s (%s sleep seconds), acting like startup..." 
% (last_time, time.time(), time.time() - last_time) ) - - my_ips = socket.gethostbyname_ex('')[2] - is_ip_changed = my_ips != last_my_ips - if is_ip_changed: - self.log.info("IP change detected from %s to %s" % (last_my_ips, my_ips)) - - if is_time_changed or is_ip_changed: self.checkSites(check_files=False, force_port_check=True) - last_time = time.time() - last_my_ips = my_ips # Bind and start serving sites def start(self, check_sites=True): - if self.stopping: - return False + self.sites = SiteManager.site_manager.list() + self.log = logging.getLogger("FileServer") - ConnectionServer.start(self) - - try: - self.stream_server.start() - except Exception as err: - self.log.error("Error listening on: %s:%s: %s" % (self.ip, self.port, err)) - - self.sites = self.site_manager.list() if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.reload) + DebugReloader(self.reload) if check_sites: # Open port, Update sites, Check files integrity gevent.spawn(self.checkSites) @@ -393,17 +300,16 @@ class FileServer(ConnectionServer): thread_cleanup_sites = gevent.spawn(self.cleanupSites) thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher) - ConnectionServer.listen(self) + ConnectionServer.start(self) self.log.debug("Stopped.") def stop(self): - if self.running and self.portchecker.upnp_port_opened: + if self.running and self.upnp_port_opened: self.log.debug('Closing port %d' % self.port) try: - self.portchecker.portClose(self.port) + UpnpPunch.ask_to_close_port(self.port, protos=["TCP"]) self.log.info('Closed port via upnp.') - except Exception as err: + except (UpnpPunch.UpnpError, UpnpPunch.IGDError), err: self.log.info("Failed at attempt to use upnp to close port: %s" % err) - - return ConnectionServer.stop(self) + ConnectionServer.stop(self) diff --git a/src/File/__init__.py b/src/File/__init__.py index 1eb602d6..20b28a97 100644 --- a/src/File/__init__.py +++ b/src/File/__init__.py @@ -1,2 +1,2 @@ -from .FileServer import FileServer -from .FileRequest import FileRequest \ No newline at end of file +from FileServer import FileServer +from FileRequest import FileRequest \ No newline at end of file diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index 03cc1f47..fdafcb50 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -1,29 +1,24 @@ import logging import time import sys -import itertools -import collections import gevent -import io +from cStringIO import StringIO from Debug import Debug from Config import config from util import helper -from .PeerHashfield import PeerHashfield -from Plugin import PluginManager +from PeerHashfield import PeerHashfield if config.use_tempfiles: import tempfile # Communicate remote peers -@PluginManager.acceptPlugins class Peer(object): __slots__ = ( - "ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield", - "time_added", "has_hashfield", "is_tracker_connection", "time_my_hashfield_sent", "last_ping", "reputation", - "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" + "ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield", "time_added", "has_hashfield", + "time_my_hashfield_sent", "last_ping", "reputation", "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" ) def __init__(self, ip, port, site=None, connection_server=None): @@ -41,7 +36,6 @@ class Peer(object): 
self.time_response = None # Time of last successful response from peer self.time_added = time.time() self.last_ping = None # Last response time for ping - self.is_tracker_connection = False # Tracker connection instead of normal peer self.reputation = 0 # More likely to connect if larger self.last_content_json_update = 0.0 # Modify date of last received content.json @@ -68,19 +62,13 @@ class Peer(object): # Connect to host def connect(self, connection=None): - if self.reputation < -10: - self.reputation = -10 - if self.reputation > 10: - self.reputation = 10 - if self.connection: self.log("Getting connection (Closing %s)..." % self.connection) self.connection.close("Connection change") else: - self.log("Getting connection (reputation: %s)..." % self.reputation) + self.log("Getting connection...") if connection: # Connection specified - self.log("Assigning connection %s" % connection) self.connection = connection self.connection.sites += 1 else: # Try to find from connection pool or create new connection @@ -88,21 +76,18 @@ class Peer(object): try: if self.connection_server: - connection_server = self.connection_server + self.connection = self.connection_server.getConnection(self.ip, self.port, site=self.site) elif self.site: - connection_server = self.site.connection_server + self.connection = self.site.connection_server.getConnection(self.ip, self.port, site=self.site) else: - import main - connection_server = main.file_server - self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection) - self.reputation += 1 + self.connection = sys.modules["main"].file_server.getConnection(self.ip, self.port, site=self.site) self.connection.sites += 1 - except Exception as err: + + except Exception, err: self.onConnectionError("Getting connection error") self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) self.connection = None - return self.connection # Check if we have connection to peer def findConnection(self): @@ -115,10 +100,7 @@ class Peer(object): return self.connection def __str__(self): - if self.site: - return "Peer:%-12s of %s" % (self.ip, self.site.address_short) - else: - return "Peer:%-12s" % self.ip + return "Peer:%-12s" % self.ip def __repr__(self): return "<%s>" % self.__str__() @@ -129,19 +111,8 @@ class Peer(object): else: return helper.packAddress(self.ip, self.port) - # Found a peer from a source - def found(self, source="other"): - if self.reputation < 5: - if source == "tracker": - if self.ip.endswith(".onion"): - self.reputation += 1 - else: - self.reputation += 2 - elif source == "local": - self.reputation += 20 - - if source in ("tracker", "local"): - self.site.peers_recent.appendleft(self) + # Found a peer on tracker + def found(self): self.time_found = time.time() # Send a command to peer and return response value @@ -152,12 +123,10 @@ class Peer(object): self.onConnectionError("Reconnect error") return None # Connection failed - self.log("Send request: %s %s %s %s" % (params.get("site", ""), cmd, params.get("inner_path", ""), params.get("location", ""))) + self.log("Send request: %s %s" % (params.get("site", ""), cmd)) for retry in range(1, 4): # Retry 3 times try: - if not self.connection: - raise Exception("No connection found") res = self.connection.request(cmd, params, stream_to) if not res: raise Exception("Send error") @@ -168,11 +137,8 @@ class Peer(object): else: # Successful request, reset 
connection error num self.connection_error = 0 self.time_response = time.time() - if res: - return res - else: - raise Exception("Invalid response: %s" % res) - except Exception as err: + return res + except Exception, err: if type(err).__name__ == "Notify": # Greenlet killed by worker self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) break @@ -187,56 +153,62 @@ class Peer(object): return None # Failed after 4 retry # Get a file content from peer - def getFile(self, site, inner_path, file_size=None, pos_from=0, pos_to=None, streaming=False): - if file_size and file_size > 5 * 1024 * 1024: - max_read_size = 1024 * 1024 - else: - max_read_size = 512 * 1024 - - if pos_to: - read_bytes = min(max_read_size, pos_to - pos_from) - else: - read_bytes = max_read_size - - location = pos_from + def getFile(self, site, inner_path): + # Use streamFile if client supports it + if config.stream_downloads and self.connection and self.connection.handshake and self.connection.handshake["rev"] > 310: + return self.streamFile(site, inner_path) + location = 0 if config.use_tempfiles: buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') else: - buff = io.BytesIO() + buff = StringIO() s = time.time() - while True: # Read in smaller parts - if config.stream_downloads or read_bytes > 256 * 1024 or streaming: - res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}, stream_to=buff) - if not res or "location" not in res: # Error - return False - else: - self.log("Send: %s" % inner_path) - res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}) - if not res or "location" not in res: # Error - return False - self.log("Recv: %s" % inner_path) - buff.write(res["body"]) - res["body"] = None # Save memory + while True: # Read in 512k parts + res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) - if res["location"] == res["size"] or res["location"] == pos_to: # End of file + if not res or "body" not in res: # Error + return False + + buff.write(res["body"]) + res["body"] = None # Save memory + if res["location"] == res["size"]: # End of file break else: location = res["location"] - if pos_to: - read_bytes = min(max_read_size, pos_to - location) - if pos_to: - recv = pos_to - pos_from - else: - recv = res["location"] - - self.download_bytes += recv + self.download_bytes += res["location"] self.download_time += (time.time() - s) if self.site: - self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + recv - self.log("Downloaded: %s, pos: %s, read_bytes: %s" % (inner_path, buff.tell(), read_bytes)) + self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"] + buff.seek(0) + return buff + + # Download file out of msgpack context to save memory and cpu + def streamFile(self, site, inner_path): + location = 0 + if config.use_tempfiles: + buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') + else: + buff = StringIO() + + s = time.time() + while True: # Read in 512k parts + res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location}, stream_to=buff) + + if not res or "location" not in res: # Error + self.log("Invalid response: %s" % res) + return False + + if res["location"] == res["size"]: # End of file + break + else: + location = res["location"] + + 
self.download_bytes += res["location"] + self.download_time += (time.time() - s) + self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"] buff.seek(0) return buff @@ -248,7 +220,7 @@ class Peer(object): with gevent.Timeout(10.0, False): # 10 sec timeout, don't raise exception res = self.request("ping") - if res and "body" in res and res["body"] == b"Pong!": + if res and "body" in res and res["body"] == "Pong!": response_time = time.time() - s break # All fine, exit from for loop # Timeout reached or bad response @@ -269,34 +241,23 @@ class Peer(object): site = self.site # If no site defined request peers for this site # give back 5 connectible peers - packed_peers = helper.packPeers(self.site.getConnectablePeers(5, allow_private=False)) - request = {"site": site.address, "peers": packed_peers["ipv4"], "need": need_num} + packed_peers = helper.packPeers(self.site.getConnectablePeers(5)) + request = {"site": site.address, "peers": packed_peers["ip4"], "need": need_num} if packed_peers["onion"]: request["peers_onion"] = packed_peers["onion"] - if packed_peers["ipv6"]: - request["peers_ipv6"] = packed_peers["ipv6"] res = self.request("pex", request) if not res or "error" in res: return False added = 0 - - # Remove unsupported peer types - if "peers_ipv6" in res and self.connection and "ipv6" not in self.connection.server.supported_ip_types: - del res["peers_ipv6"] - - if "peers_onion" in res and self.connection and "onion" not in self.connection.server.supported_ip_types: - del res["peers_onion"] - - # Add IPv4 + IPv6 - for peer in itertools.chain(res.get("peers", []), res.get("peers_ipv6", [])): + # Ip4 + for peer in res.get("peers", []): address = helper.unpackAddress(peer) - if site.addPeer(*address, source="pex"): + if site.addPeer(*address): added += 1 - - # Add Onion + # Onion for peer in res.get("peers_onion", []): address = helper.unpackOnionAddress(peer) - if site.addPeer(*address, source="pex"): + if site.addPeer(*address): added += 1 if added: @@ -310,15 +271,15 @@ class Peer(object): return self.request("listModified", {"since": since, "site": self.site.address}) def updateHashfield(self, force=False): - # Don't update hashfield again in 5 min - if self.time_hashfield and time.time() - self.time_hashfield < 5 * 60 and not force: + # Don't update hashfield again in 15 min + if self.time_hashfield and time.time() - self.time_hashfield > 60 * 15 and not force: return False self.time_hashfield = time.time() res = self.request("getHashfield", {"site": self.site.address}) - if not res or "error" in res or "hashfield_raw" not in res: + if not res or "error" in res: return False - self.hashfield.replaceFromBytes(res["hashfield_raw"]) + self.hashfield.replaceFromString(res["hashfield_raw"]) return self.hashfield @@ -326,29 +287,15 @@ class Peer(object): # Return: {hash1: ["ip:port", "ip:port",...],...} def findHashIds(self, hash_ids): res = self.request("findHashIds", {"site": self.site.address, "hash_ids": hash_ids}) - if not res or "error" in res or type(res) is not dict: + if not res or "error" in res: return False - - back = collections.defaultdict(list) - - for ip_type in ["ipv4", "ipv6", "onion"]: - if ip_type == "ipv4": - key = "peers" - else: - key = "peers_%s" % ip_type - for hash, peers in list(res.get(key, {}).items())[0:30]: - if ip_type == "onion": - unpacker_func = helper.unpackOnionAddress - else: - unpacker_func = helper.unpackAddress - - back[hash] += list(map(unpacker_func, peers)) - - for hash in res.get("my", []): - if 
self.connection: - back[hash].append((self.connection.ip, self.connection.port)) - else: - back[hash].append((self.ip, self.port)) + # Unpack IP4 + back = {key: map(helper.unpackAddress, val) for key, val in res["peers"].items()[0:30]} + # Unpack onion + for hash, onion_peers in res.get("peers_onion", {}).items()[0:30]: + if hash not in back: + back[hash] = [] + back[hash] += map(helper.unpackOnionAddress, onion_peers) return back @@ -360,35 +307,18 @@ class Peer(object): if self.time_my_hashfield_sent and self.site.content_manager.hashfield.time_changed <= self.time_my_hashfield_sent: return False # Peer already has the latest hashfield - res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tobytes()}) + res = self.request("setHashfield", {"site": self.site.address, "hashfield_raw": self.site.content_manager.hashfield.tostring()}) if not res or "error" in res: return False else: self.time_my_hashfield_sent = time.time() return True - def publish(self, address, inner_path, body, modified, diffs=[]): - if len(body) > 10 * 1024 and self.connection and self.connection.handshake.get("rev", 0) >= 4095: - # To save bw we don't push big content.json to peers - body = b"" - - return self.request("update", { - "site": address, - "inner_path": inner_path, - "body": body, - "modified": modified, - "diffs": diffs - }) - # Stop and remove from site def remove(self, reason="Removing"): self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed)) if self.site and self.key in self.site.peers: del(self.site.peers[self.key]) - - if self.site and self in self.site.peers_recent: - self.site.peers_recent.remove(self) - if self.connection: self.connection.close(reason) @@ -397,12 +327,7 @@ class Peer(object): # On connection error def onConnectionError(self, reason="Unknown"): self.connection_error += 1 - if self.site and len(self.site.peers) > 200: - limit = 3 - else: - limit = 6 - self.reputation -= 1 - if self.connection_error >= limit: # Dead peer + if self.connection_error >= 6: # Dead peer self.remove("Peer connection: %s" % reason) # Done working with peer diff --git a/src/Peer/PeerHashfield.py b/src/Peer/PeerHashfield.py index fdd414c8..050d47f4 100644 --- a/src/Peer/PeerHashfield.py +++ b/src/Peer/PeerHashfield.py @@ -3,7 +3,7 @@ import time class PeerHashfield(object): - __slots__ = ("storage", "time_changed", "append", "remove", "tobytes", "frombytes", "__len__", "__iter__") + __slots__ = ("storage", "time_changed", "append", "remove", "tostring", "fromstring", "__len__", "__iter__") def __init__(self): self.storage = self.createStorage() self.time_changed = time.time() @@ -12,8 +12,8 @@ class PeerHashfield(object): storage = array.array("H") self.append = storage.append self.remove = storage.remove - self.tobytes = storage.tobytes - self.frombytes = storage.frombytes + self.tostring = storage.tostring + self.fromstring = storage.fromstring self.__len__ = storage.__len__ self.__iter__ = storage.__iter__ return storage @@ -58,9 +58,9 @@ class PeerHashfield(object): def hasHash(self, hash): return int(hash[0:4], 16) in self.storage - def replaceFromBytes(self, hashfield_raw): + def replaceFromString(self, hashfield_raw): self.storage = self.createStorage() - self.storage.frombytes(hashfield_raw) + self.storage.fromstring(hashfield_raw) self.time_changed = time.time() if __name__ == "__main__": @@ -68,8 +68,8 @@ if __name__ == "__main__": s = time.time() for i in range(10000): 
field.appendHashId(i) - print(time.time()-s) + print time.time()-s s = time.time() for i in range(10000): field.hasHash("AABB") - print(time.time()-s) \ No newline at end of file + print time.time()-s \ No newline at end of file diff --git a/src/Peer/PeerPortchecker.py b/src/Peer/PeerPortchecker.py deleted file mode 100644 index 3c4daecf..00000000 --- a/src/Peer/PeerPortchecker.py +++ /dev/null @@ -1,189 +0,0 @@ -import logging -import urllib.request -import urllib.parse -import re -import time - -from Debug import Debug -from util import UpnpPunch - - -class PeerPortchecker(object): - checker_functions = { - "ipv4": ["checkIpfingerprints", "checkCanyouseeme"], - "ipv6": ["checkMyaddr", "checkIpv6scanner"] - } - def __init__(self, file_server): - self.log = logging.getLogger("PeerPortchecker") - self.upnp_port_opened = False - self.file_server = file_server - - def requestUrl(self, url, post_data=None): - if type(post_data) is dict: - post_data = urllib.parse.urlencode(post_data).encode("utf8") - req = urllib.request.Request(url, post_data) - req.add_header("Referer", url) - req.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11") - req.add_header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8") - return urllib.request.urlopen(req, timeout=20.0) - - def portOpen(self, port): - self.log.info("Trying to open port using UpnpPunch...") - - try: - UpnpPunch.ask_to_open_port(port, 'ZeroNet', retries=3, protos=["TCP"]) - self.upnp_port_opened = True - except Exception as err: - self.log.warning("UpnpPunch run error: %s" % Debug.formatException(err)) - return False - - return True - - def portClose(self, port): - return UpnpPunch.ask_to_close_port(port, protos=["TCP"]) - - def portCheck(self, port, ip_type="ipv4"): - checker_functions = self.checker_functions[ip_type] - - for func_name in checker_functions: - func = getattr(self, func_name) - s = time.time() - try: - res = func(port) - if res: - self.log.info( - "Checked port %s (%s) using %s result: %s in %.3fs" % - (port, ip_type, func_name, res, time.time() - s) - ) - time.sleep(0.1) - if res["opened"] and not self.file_server.had_external_incoming: - res["opened"] = False - self.log.warning("Port %s:%s looks opened, but no incoming connection" % (res["ip"], port)) - break - except Exception as err: - self.log.warning( - "%s check error: %s in %.3fs" % - (func_name, Debug.formatException(err), time.time() - s) - ) - res = {"ip": None, "opened": False} - - return res - - def checkCanyouseeme(self, port): - data = urllib.request.urlopen("https://www.canyouseeme.org/", b"ip=1.1.1.1&port=%s" % str(port).encode("ascii"), timeout=20.0).read().decode("utf8") - - message = re.match(r'.*
<p style="padding-left:15px">(.*?)</p>', data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ")) # Strip http tags - - match = re.match(r".*service on (.*?) on", message) - if match: - ip = match.group(1) - else: - raise Exception("Invalid response: %s" % message) - - if "Success" in message: - return {"ip": ip, "opened": True} - elif "Error" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message)
- - def checkIpfingerprints(self, port): - data = self.requestUrl("https://www.ipfingerprints.com/portscan.php").read().decode("utf8") - ip = re.match(r'.*name="remoteHost".*?value="(.*?)"', data, re.DOTALL).group(1) - - post_data = { - "remoteHost": ip, "start_port": port, "end_port": port, - "normalScan": "Yes", "scan_type": "connect2", "ping_type": "none" - } - message = self.requestUrl("https://www.ipfingerprints.com/scripts/getPortsInfo.php", post_data).read().decode("utf8") - - if "open" in message: - return {"ip": ip, "opened": True} - elif "filtered" in message or "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message)
- - def checkMyaddr(self, port): - url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php" - - data = self.requestUrl(url).read().decode("utf8") - - ip = re.match(r'.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1) - - post_data = {"addr": ip, "ports_selected": "", "ports_list": port} - data = self.requestUrl(url, post_data).read().decode("utf8") - - message = re.match(r".*<table class='table_font_16'>(.*?)</table>", data, re.DOTALL).group(1) - - if "ok.png" in message: - return {"ip": ip, "opened": True} - elif "fail.png" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message)
- - def checkIpv6scanner(self, port): - url = "http://www.ipv6scanner.com/cgi-bin/main.py" - - data = self.requestUrl(url).read().decode("utf8") - - ip = re.match(r'.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace("&nbsp;", ""), re.DOTALL).group(1) - - post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"} - data = self.requestUrl(url, post_data).read().decode("utf8") - - message = re.match(r".*<table id=\"scantable\">(.*?)</table>", data, re.DOTALL).group(1) - message_text = re.sub("<.*?>", " ", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags - - if "OPEN" in message_text: - return {"ip": ip, "opened": True} - elif "CLOSED" in message_text or "FILTERED" in message_text: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message_text)
- - def checkPortchecker(self, port): # Not working: Forbidden - data = self.requestUrl("https://portchecker.co").read().decode("utf8") - csrf = re.match(r'.*name="_csrf" value="(.*?)"', data, re.DOTALL).group(1) - - data = self.requestUrl("https://portchecker.co", {"port": port, "_csrf": csrf}).read().decode("utf8") - message = re.match(r'.*<div id="results-wrapper">(.*?)</div>', data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags - - match = re.match(r".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) - if match: - ip = match.group(1) - else: - raise Exception("Invalid response: %s" % message) - - if "open" in message: - return {"ip": ip, "opened": True} - elif "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message)
- - def checkSubnetonline(self, port): # Not working: Invalid response - url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php" - - data = self.requestUrl(url).read().decode("utf8") - - ip = re.match(r'.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) - token = re.match(r'.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) - - post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."} - data = self.requestUrl(url, post_data).read().decode("utf8") - - print(post_data, data) - - message = re.match(r".*<div class='formfield'>(.*?)</div>", data, re.DOTALL).group(1) - message = re.sub(r"<.*?>", "", message.replace("<br>", " ").replace("&nbsp;", " ").strip()) # Strip http tags - - if "online" in message: - return {"ip": ip, "opened": True} - elif "closed" in message: - return {"ip": ip, "opened": False} - else: - raise Exception("Invalid response: %s" % message)
diff --git a/src/Peer/__init__.py b/src/Peer/__init__.py index e73c58c5..3e92827f 100644 --- a/src/Peer/__init__.py +++ b/src/Peer/__init__.py @@ -1,2 +1,2 @@ -from .Peer import Peer -from .PeerHashfield import PeerHashfield +from Peer import Peer +from PeerHashfield import PeerHashfield
diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py index 56540e60..2a3c89f1 100644 --- a/src/Plugin/PluginManager.py +++ b/src/Plugin/PluginManager.py @@ -1,188 +1,72 @@ import logging import os import sys -import shutil -import time from collections import defaultdict -import importlib -import json - from Debug import Debug from Config import config -import plugins class PluginManager: def __init__(self): self.log = logging.getLogger("PluginManager") - self.path_plugins = None - if plugins.__file__: - self.path_plugins = os.path.dirname(os.path.abspath(plugins.__file__)); - self.path_installed_plugins = config.data_dir + "/__plugins__" + self.plugin_path = "plugins" # Plugin directory self.plugins = defaultdict(list) # Registered plugins (key: class name, value: list of plugins for class) self.subclass_order = {} # Record the load order of the plugins, to keep it after reload self.pluggable = {} self.plugin_names = [] # Loaded plugin names - self.plugins_updated = {} # List of updated plugins since restart - self.plugins_rev = {} # Installed plugins revision numbers - self.after_load = [] # Execute functions after loaded plugins - self.function_flags = {} # Flag function for permissions - self.reloading = False - self.config_path = config.data_dir + "/plugins.json" - self.loadConfig() - self.config.setdefault("builtin", {}) - - if self.path_plugins: - sys.path.append(os.path.join(os.getcwd(), self.path_plugins)) - self.migratePlugins() + sys.path.append(self.plugin_path) if config.debug: # Auto reload Plugins on file change from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.reloadPlugins) - - def loadConfig(self): - if os.path.isfile(self.config_path): - try: - self.config = json.load(open(self.config_path, encoding="utf8")) - except Exception as err: - self.log.error("Error loading %s: %s" % (self.config_path, err)) - self.config = {} - else: - self.config = {} - - def saveConfig(self): - f = open(self.config_path, "w", encoding="utf8") - json.dump(self.config, f, ensure_ascii=False, sort_keys=True, indent=2) - - def migratePlugins(self): - for dir_name in os.listdir(self.path_plugins): - if dir_name == "Mute": - self.log.info("Deleting deprecated/renamed plugin: %s" % dir_name) - shutil.rmtree("%s/%s" % (self.path_plugins, dir_name)) + DebugReloader(self.reloadPlugins) # -- Load / Unload -- - def listPlugins(self, list_disabled=False): - plugins = [] - for dir_name in sorted(os.listdir(self.path_plugins)): - dir_path = os.path.join(self.path_plugins, dir_name) - plugin_name = dir_name.replace("disabled-", "") - if dir_name.startswith("disabled"): - is_enabled = False - else: - is_enabled = True - - plugin_config = self.config["builtin"].get(plugin_name, {}) - if "enabled" in plugin_config: - is_enabled = plugin_config["enabled"] - - if dir_name == "__pycache__" or not os.path.isdir(dir_path): - continue # skip - if dir_name.startswith("Debug") and not config.debug: - continue # Only load in debug mode if module name
starts with Debug - if not is_enabled and not list_disabled: - continue # Dont load if disabled - - plugin = {} - plugin["source"] = "builtin" - plugin["name"] = plugin_name - plugin["dir_name"] = dir_name - plugin["dir_path"] = dir_path - plugin["inner_path"] = plugin_name - plugin["enabled"] = is_enabled - plugin["rev"] = config.rev - plugin["loaded"] = plugin_name in self.plugin_names - plugins.append(plugin) - - plugins += self.listInstalledPlugins(list_disabled) - return plugins - - def listInstalledPlugins(self, list_disabled=False): - plugins = [] - - for address, site_plugins in sorted(self.config.items()): - if address == "builtin": - continue - for plugin_inner_path, plugin_config in sorted(site_plugins.items()): - is_enabled = plugin_config.get("enabled", False) - if not is_enabled and not list_disabled: - continue - plugin_name = os.path.basename(plugin_inner_path) - - dir_path = "%s/%s/%s" % (self.path_installed_plugins, address, plugin_inner_path) - - plugin = {} - plugin["source"] = address - plugin["name"] = plugin_name - plugin["dir_name"] = plugin_name - plugin["dir_path"] = dir_path - plugin["inner_path"] = plugin_inner_path - plugin["enabled"] = is_enabled - plugin["rev"] = plugin_config.get("rev", 0) - plugin["loaded"] = plugin_name in self.plugin_names - plugins.append(plugin) - - return plugins - # Load all plugin def loadPlugins(self): - all_loaded = True - s = time.time() - if self.path_plugins is None: - return - for plugin in self.listPlugins(): - self.log.debug("Loading plugin: %s (%s)" % (plugin["name"], plugin["source"])) - if plugin["source"] != "builtin": - self.plugins_rev[plugin["name"]] = plugin["rev"] - site_plugin_dir = os.path.dirname(plugin["dir_path"]) - if site_plugin_dir not in sys.path: - sys.path.append(site_plugin_dir) + for dir_name in sorted(os.listdir(self.plugin_path)): + dir_path = os.path.join(self.plugin_path, dir_name) + if dir_name.startswith("disabled"): + continue # Dont load if disabled + if not os.path.isdir(dir_path): + continue # Dont load if not dir + if dir_name.startswith("Debug") and not config.debug: + continue # Only load in debug mode if module name starts with Debug + self.log.debug("Loading plugin: %s" % dir_name) try: - sys.modules[plugin["name"]] = __import__(plugin["dir_name"]) - except Exception as err: - self.log.error("Plugin %s load error: %s" % (plugin["name"], Debug.formatException(err))) - all_loaded = False - if plugin["name"] not in self.plugin_names: - self.plugin_names.append(plugin["name"]) - - self.log.debug("Plugins loaded in %.3fs" % (time.time() - s)) - for func in self.after_load: - func() - return all_loaded + __import__(dir_name) + except Exception, err: + self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err))) + if dir_name not in self.plugin_names: + self.plugin_names.append(dir_name) # Reload all plugins def reloadPlugins(self): - self.reloading = True - self.after_load = [] self.plugins_before = self.plugins self.plugins = defaultdict(list) # Reset registered plugins - for module_name, module in list(sys.modules.items()): - if not module or not getattr(module, "__file__", None): - continue - if self.path_plugins not in module.__file__ and self.path_installed_plugins not in module.__file__: - continue - - if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled - # Re-add non-reloadable plugins - for class_name, classes in self.plugins_before.items(): - for c in classes: - if c.__module__ != module.__name__: - continue - 
self.plugins[class_name].append(c) - else: - try: - importlib.reload(module) - except Exception as err: - self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) + for module_name, module in sys.modules.items(): + if module and "__file__" in dir(module) and self.plugin_path in module.__file__: # Module file within plugin_path + if "allow_reload" in dir(module) and not module.allow_reload: # Reload disabled + # Re-add non-reloadable plugins + for class_name, classes in self.plugins_before.iteritems(): + for c in classes: + if c.__module__ != module.__name__: + continue + self.plugins[class_name].append(c) + else: + try: + reload(module) + except Exception, err: + self.log.error("Plugin %s reload error: %s" % (module_name, Debug.formatException(err))) self.loadPlugins() # Load new plugins # Change current classes in memory import gc patched = {} - for class_name, classes in self.plugins.items(): + for class_name, classes in self.plugins.iteritems(): classes = classes[:] # Copy the current plugins classes.reverse() base_class = self.pluggable[class_name] # Original class @@ -196,8 +80,8 @@ class PluginManager: # Change classes in modules patched = {} - for class_name, classes in self.plugins.items(): - for module_name, module in list(sys.modules.items()): + for class_name, classes in self.plugins.iteritems(): + for module_name, module in sys.modules.iteritems(): if class_name in dir(module): if "__class__" not in dir(getattr(module, class_name)): # Not a class continue @@ -210,7 +94,6 @@ class PluginManager: patched[class_name] = patched.get(class_name, 0) + 1 self.log.debug("Patched modules: %s" % patched) - self.reloading = False plugin_manager = PluginManager() # Singletone @@ -235,7 +118,7 @@ def acceptPlugins(base_class): if str(key) in plugin_manager.subclass_order[class_name] else 9999 ) - plugin_manager.subclass_order[class_name] = list(map(str, classes)) + plugin_manager.subclass_order[class_name] = map(str, classes) classes.reverse() classes.append(base_class) # Add the class itself to end of inherience line @@ -248,13 +131,6 @@ def acceptPlugins(base_class): # Register plugin to class name decorator def registerTo(class_name): - if config.debug and not plugin_manager.reloading: - import gc - for obj in gc.get_objects(): - if type(obj).__name__ == class_name: - raise Exception("Class %s instances already present in memory" % class_name) - break - plugin_manager.log.debug("New plugin registered to: %s" % class_name) if class_name not in plugin_manager.plugins: plugin_manager.plugins[class_name] = [] @@ -265,11 +141,6 @@ def registerTo(class_name): return classDecorator -def afterLoad(func): - plugin_manager.after_load.append(func) - return func - - # - Example usage - if __name__ == "__main__": @@ -289,4 +160,4 @@ if __name__ == "__main__": else: return "Can't route to", path - print(Request().route("MainPage")) + print Request().route("MainPage") diff --git a/src/Site/Site.py b/src/Site/Site.py index d6179307..54f13794 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -1,50 +1,49 @@ import os import json import logging +import hashlib import re import time import random import sys -import hashlib -import collections -import base64 +import struct +import socket +import urllib +import urllib2 import gevent import gevent.pool import util +from lib import bencode +from lib.subtl.subtl import UdpTrackerClient from Config import config from Peer import Peer from Worker import WorkerManager from Debug import Debug from Content import ContentManager 
-from .SiteStorage import SiteStorage +from SiteStorage import SiteStorage from Crypt import CryptHash from util import helper from util import Diff -from util import GreenletManager from Plugin import PluginManager -from File import FileServer -from .SiteAnnouncer import SiteAnnouncer -from . import SiteManager +import SiteManager @PluginManager.acceptPlugins class Site(object): def __init__(self, address, allow_create=True, settings=None): - self.address = str(re.sub("[^A-Za-z0-9]", "", address)) # Make sure its correct address - self.address_hash = hashlib.sha256(self.address.encode("ascii")).digest() - self.address_sha1 = hashlib.sha1(self.address.encode("ascii")).digest() + self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging self.log = logging.getLogger("Site:%s" % self.address_short) self.addEventListeners() self.content = None # Load content.json self.peers = {} # Key: ip:port, Value: Peer.Peer - self.peers_recent = collections.deque(maxlen=150) self.peer_blacklist = SiteManager.peer_blacklist # Ignore this peers (eg. myself) - self.greenlet_manager = GreenletManager.GreenletManager() # Running greenlets + self.time_announce = 0 # Last announce time to tracker + self.last_tracker_id = random.randint(0, 10) # Last announced tracker id self.worker_manager = WorkerManager(self) # Handle site download from other peers self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept) self.content_updated = None # Content.js update time @@ -53,30 +52,22 @@ class Site(object): self.websockets = [] # Active site websocket connections self.connection_server = None - self.loadSettings(settings) # Load settings from sites.json self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files + self.loadSettings(settings) # Load settings from sites.json self.content_manager = ContentManager(self) self.content_manager.loadContents() # Load content.json files - if "main" in sys.modules: # import main has side-effects, breaks tests - import main - if "file_server" in dir(main): # Use global file server by default if possible - self.connection_server = main.file_server - else: - main.file_server = FileServer() - self.connection_server = main.file_server + if "main" in sys.modules and "file_server" in dir(sys.modules["main"]): # Use global file server by default if possible + self.connection_server = sys.modules["main"].file_server else: - self.connection_server = FileServer() - - self.announcer = SiteAnnouncer(self) # Announce and get peer list from other nodes + self.connection_server = None + if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed) + self.settings["auth_key"] = CryptHash.random() + self.log.debug("New auth key: %s" % self.settings["auth_key"]) if not self.settings.get("wrapper_key"): # To auth websocket permissions self.settings["wrapper_key"] = CryptHash.random() self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"]) - if not self.settings.get("ajax_key"): # To auth websocket permissions - self.settings["ajax_key"] = CryptHash.random() - self.log.debug("New ajax key: %s" % self.settings["ajax_key"]) - def __str__(self): return "Site %s" % self.address_short @@ -95,23 +86,16 @@ class Site(object): settings["size_optional"] = 0 if "optional_downloaded" not in settings: settings["optional_downloaded"] = 0 - if "downloaded" not in 
settings: - settings["downloaded"] = settings.get("added") self.bad_files = settings["cache"].get("bad_files", {}) settings["cache"]["bad_files"] = {} - # Give it minimum 10 tries after restart + # Reset tries for inner_path in self.bad_files: - self.bad_files[inner_path] = min(self.bad_files[inner_path], 20) + self.bad_files[inner_path] = 1 else: - self.settings = { - "own": False, "serving": True, "permissions": [], "cache": {"bad_files": {}}, "size_files_optional": 0, - "added": int(time.time()), "downloaded": None, "optional_downloaded": 0, "size_optional": 0 - } # Default - if config.download_optional == "auto": - self.settings["autodownloadoptional"] = True + self.settings = {"own": False, "serving": True, "permissions": [], "added": int(time.time()), "optional_downloaded": 0, "size_optional": 0} # Default # Add admin permissions to homepage - if self.address in (config.homepage, config.updatesite) and "ADMIN" not in self.settings["permissions"]: + if self.address == config.homepage and "ADMIN" not in self.settings["permissions"]: self.settings["permissions"].append("ADMIN") return @@ -123,19 +107,7 @@ class Site(object): if not SiteManager.site_manager.sites.get(self.address): SiteManager.site_manager.sites[self.address] = self SiteManager.site_manager.load(False) - SiteManager.site_manager.saveDelayed() - - def isServing(self): - if config.offline: - return False - else: - return self.settings["serving"] - - def getSettingsCache(self): - back = {} - back["bad_files"] = self.bad_files - back["hashfield"] = base64.b64encode(self.content_manager.hashfield.tobytes()).decode("ascii") - return back + SiteManager.site_manager.save() # Max site size in MB def getSizeLimit(self): @@ -143,64 +115,41 @@ class Site(object): # Next size limit based on current size def getNextSizeLimit(self): - size_limits = [25, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] + size_limits = [10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, 100000] size = self.settings.get("size", 0) for size_limit in size_limits: if size * 1.2 < size_limit * 1024 * 1024: return size_limit return 999999 - def isAddedRecently(self): - return time.time() - self.settings.get("added", 0) < 60 * 60 * 24 - # Download all file from content.json def downloadContent(self, inner_path, download_files=True, peer=None, check_modifications=False, diffs={}): s = time.time() if config.verbose: - self.log.debug( - "DownloadContent %s: Started. (download_files: %s, check_modifications: %s, diffs: %s)..." % - (inner_path, download_files, check_modifications, diffs.keys()) - ) - - if not inner_path.endswith("content.json"): - return False + self.log.debug("Downloading %s..." 
% inner_path) found = self.needFile(inner_path, update=self.bad_files.get(inner_path)) content_inner_dir = helper.getDirname(inner_path) if not found: - self.log.debug("DownloadContent %s: Download failed, check_modifications: %s" % (inner_path, check_modifications)) + self.log.debug("Download %s failed, check_modifications: %s" % (inner_path, check_modifications)) if check_modifications: # Download failed, but check modifications if its succed later self.onFileDone.once(lambda file_name: self.checkModifications(0), "check_modifications") return False # Could not download content.json if config.verbose: - self.log.debug("DownloadContent got %s" % inner_path) - sub_s = time.time() - + self.log.debug("Got %s" % inner_path) changed, deleted = self.content_manager.loadContent(inner_path, load_includes=False) - if config.verbose: - self.log.debug("DownloadContent %s: loadContent done in %.3fs" % (inner_path, time.time() - sub_s)) - if inner_path == "content.json": self.saveSettings() if peer: # Update last received update from peer to prevent re-sending the same update to it peer.last_content_json_update = self.content_manager.contents[inner_path]["modified"] - # Verify size limit - if inner_path == "content.json": - site_size_limit = self.getSizeLimit() * 1024 * 1024 - content_size = len(json.dumps(self.content_manager.contents[inner_path], indent=1)) + sum([file["size"] for file in list(self.content_manager.contents[inner_path].get("files", {}).values()) if file["size"] >= 0]) # Size of new content - if site_size_limit < content_size: - # Not enought don't download anything - self.log.debug("DownloadContent Size limit reached (site too big please increase limit): %.2f MB > %.2f MB" % (content_size / 1024 / 1024, site_size_limit / 1024 / 1024)) - return False - # Start download files file_threads = [] if download_files: - for file_relative_path in list(self.content_manager.contents[inner_path].get("files", {}).keys()): + for file_relative_path in self.content_manager.contents[inner_path].get("files", {}).keys(): file_inner_path = content_inner_dir + file_relative_path # Try to diff first @@ -208,31 +157,16 @@ class Site(object): diff_actions = diffs.get(file_relative_path) if diff_actions and self.bad_files.get(file_inner_path): try: - s = time.time() new_file = Diff.patch(self.storage.open(file_inner_path, "rb"), diff_actions) new_file.seek(0) - time_diff = time.time() - s - - s = time.time() diff_success = self.content_manager.verifyFile(file_inner_path, new_file) - time_verify = time.time() - s - if diff_success: - s = time.time() + self.log.debug("Patched successfully: %s" % file_inner_path) new_file.seek(0) self.storage.write(file_inner_path, new_file) - time_write = time.time() - s - - s = time.time() self.onFileDone(file_inner_path) - time_on_done = time.time() - s - - self.log.debug( - "DownloadContent Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % - (file_inner_path, time_diff, time_verify, time_write, time_on_done) - ) - except Exception as err: - self.log.debug("DownloadContent Failed to patch %s: %s" % (file_inner_path, err)) + except Exception, err: + self.log.debug("Failed to patch %s: %s" % (file_inner_path, err)) diff_success = False if not diff_success: @@ -245,7 +179,7 @@ class Site(object): if inner_path == "content.json": gevent.spawn(self.updateHashfield) - for file_relative_path in list(self.content_manager.contents[inner_path].get("files_optional", {}).keys()): + for file_relative_path in 
self.content_manager.contents[inner_path].get("files_optional", {}).keys(): file_inner_path = content_inner_dir + file_relative_path if file_inner_path not in changed and not self.bad_files.get(file_inner_path): continue @@ -260,27 +194,28 @@ class Site(object): # Wait for includes download include_threads = [] - for file_relative_path in list(self.content_manager.contents[inner_path].get("includes", {}).keys()): + for file_relative_path in self.content_manager.contents[inner_path].get("includes", {}).keys(): file_inner_path = content_inner_dir + file_relative_path include_thread = gevent.spawn(self.downloadContent, file_inner_path, download_files=download_files, peer=peer) include_threads.append(include_thread) if config.verbose: - self.log.debug("DownloadContent %s: Downloading %s includes..." % (inner_path, len(include_threads))) + self.log.debug("%s: Downloading %s includes..." % (inner_path, len(include_threads))) gevent.joinall(include_threads) if config.verbose: - self.log.debug("DownloadContent %s: Includes download ended" % inner_path) + self.log.debug("%s: Includes download ended" % inner_path) if check_modifications: # Check if every file is up-to-date self.checkModifications(0) if config.verbose: - self.log.debug("DownloadContent %s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed))) + self.log.debug("%s: Downloading %s files, changed: %s..." % (inner_path, len(file_threads), len(changed))) gevent.joinall(file_threads) if config.verbose: - self.log.debug("DownloadContent %s: ended in %.3fs (tasks left: %s)" % ( - inner_path, time.time() - s, len(self.worker_manager.tasks) - )) + self.log.debug("%s: DownloadContent ended in %.3fs" % (inner_path, time.time() - s)) + + if not self.worker_manager.tasks: + self.onComplete() # No more task trigger site complete return True @@ -288,17 +223,14 @@ class Site(object): def getReachableBadFiles(self): if not self.bad_files: return False - return [bad_file for bad_file, retry in self.bad_files.items() if retry < 3] + return [bad_file for bad_file, retry in self.bad_files.iteritems() if retry < 3] # Retry download bad files def retryBadFiles(self, force=False): - self.checkBadFiles() - self.log.debug("Retry %s bad files" % len(self.bad_files)) content_inner_paths = [] file_inner_paths = [] - - for bad_file, tries in list(self.bad_files.items()): + for bad_file, tries in self.bad_files.items(): if force or random.randint(0, min(40, tries)) < 4: # Larger number tries = less likely to check every 15min if bad_file.endswith("content.json"): content_inner_paths.append(bad_file) @@ -311,36 +243,14 @@ class Site(object): if file_inner_paths: self.pooledDownloadFile(file_inner_paths, only_if_bad=True) - def checkBadFiles(self): - for bad_file in list(self.bad_files.keys()): - file_info = self.content_manager.getFileInfo(bad_file) - if bad_file.endswith("content.json"): - if file_info is False and bad_file != "content.json": - del self.bad_files[bad_file] - self.log.debug("No info for file: %s, removing from bad_files" % bad_file) - else: - if file_info is False or not file_info.get("size"): - del self.bad_files[bad_file] - self.log.debug("No info or size for file: %s, removing from bad_files" % bad_file) - # Download all files of the site @util.Noparallel(blocking=False) - def download(self, check_size=False, blind_includes=False, retry_bad_files=True): - if not self.connection_server: - self.log.debug("No connection server found, skipping download") - return False - - s = time.time() + def download(self, 
check_size=False, blind_includes=False): self.log.debug( - "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s, isAddedRecently: %s" % - (self.bad_files, check_size, blind_includes, self.isAddedRecently()) + "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s" % + (self.bad_files, check_size, blind_includes) ) - - if self.isAddedRecently(): - gevent.spawn(self.announce, mode="start", force=True) - else: - gevent.spawn(self.announce, mode="update") - + gevent.spawn(self.announce) if check_size: # Check the size first valid = self.downloadContent("content.json", download_files=False) # Just download content.json files if not valid: @@ -349,56 +259,38 @@ class Site(object): # Download everything valid = self.downloadContent("content.json", check_modifications=blind_includes) - if retry_bad_files: - self.onComplete.once(lambda: self.retryBadFiles(force=True)) - self.log.debug("Download done in %.3fs" % (time.time() - s)) + self.onComplete.once(lambda: self.retryBadFiles(force=True)) return valid def pooledDownloadContent(self, inner_paths, pool_size=100, only_if_bad=False): - self.log.debug("New downloadContent pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad)) + self.log.debug("New downloadContent pool: len: %s" % len(inner_paths)) self.worker_manager.started_task_num += len(inner_paths) pool = gevent.pool.Pool(pool_size) - num_skipped = 0 - site_size_limit = self.getSizeLimit() * 1024 * 1024 for inner_path in inner_paths: if not only_if_bad or inner_path in self.bad_files: pool.spawn(self.downloadContent, inner_path) - else: - num_skipped += 1 self.worker_manager.started_task_num -= 1 - if self.settings["size"] > site_size_limit * 0.95: - self.log.warning("Site size limit almost reached, aborting downloadContent pool") - for aborted_inner_path in inner_paths: - if aborted_inner_path in self.bad_files: - del self.bad_files[aborted_inner_path] - self.worker_manager.removeSolvedFileTasks(mark_as_good=False) - break - pool.join() - self.log.debug("Ended downloadContent pool len: %s, skipped: %s" % (len(inner_paths), num_skipped)) + self.log.debug("Ended downloadContent pool len: %s" % len(inner_paths)) def pooledDownloadFile(self, inner_paths, pool_size=100, only_if_bad=False): - self.log.debug("New downloadFile pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad)) + self.log.debug("New downloadFile pool: len: %s" % len(inner_paths)) self.worker_manager.started_task_num += len(inner_paths) pool = gevent.pool.Pool(pool_size) - num_skipped = 0 for inner_path in inner_paths: if not only_if_bad or inner_path in self.bad_files: pool.spawn(self.needFile, inner_path, update=True) - else: - num_skipped += 1 self.worker_manager.started_task_num -= 1 - self.log.debug("Ended downloadFile pool len: %s, skipped: %s" % (len(inner_paths), num_skipped)) + self.log.debug("Ended downloadFile pool len: %s" % len(inner_paths)) # Update worker, try to find client that supports listModifications command def updater(self, peers_try, queried, since): - threads = [] while 1: if not peers_try or len(queried) >= 3: # Stop after 3 successful query break peer = peers_try.pop(0) if config.verbose: - self.log.debug("CheckModifications: Try to get updates from: %s Left: %s" % (peer, peers_try)) + self.log.debug("Try to get updates from: %s Left: %s" % (peer, peers_try)) res = None with gevent.Timeout(20, exception=False): @@ -410,8 +302,7 @@ class Site(object): queried.append(peer) modified_contents = [] my_modified = self.content_manager.listModified(since) - 
num_old_files = 0 - for inner_path, modified in res["modified_files"].items(): # Check if the peer has newer files than we + for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we has_newer = int(modified) > my_modified.get(inner_path, 0) has_older = int(modified) < my_modified.get(inner_path, 0) if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified): @@ -419,18 +310,13 @@ class Site(object): # We dont have this file or we have older modified_contents.append(inner_path) self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 - if has_older and num_old_files < 5: - num_old_files += 1 - self.log.debug("CheckModifications: %s client has older version of %s, publishing there (%s/5)..." % (peer, inner_path, num_old_files)) + if has_older: + self.log.debug("%s client has older version of %s, publishing there..." % (peer, inner_path)) gevent.spawn(self.publisher, inner_path, [peer], [], 1) if modified_contents: - self.log.debug("CheckModifications: %s new modified file from %s" % (len(modified_contents), peer)) + self.log.debug("%s new modified file from %s" % (len(modified_contents), peer)) modified_contents.sort(key=lambda inner_path: 0 - res["modified_files"][inner_path]) # Download newest first - t = gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True) - threads.append(t) - if config.verbose: - self.log.debug("CheckModifications: Waiting for %s pooledDownloadContent" % len(threads)) - gevent.joinall(threads) + gevent.spawn(self.pooledDownloadContent, modified_contents) # Check modified content.json files from peers and add modified files to bad_files # Return: Successfully queried peers [Peer, Peer...] @@ -442,10 +328,10 @@ class Site(object): # Wait for peers if not self.peers: - self.announce(mode="update") + self.announce() for wait in range(10): time.sleep(5 + wait) - self.log.debug("CheckModifications: Waiting for peers...") + self.log.debug("Waiting for peers...") if self.peers: break @@ -456,12 +342,7 @@ class Site(object): if since is None: # No since defined, download from last modification time-1day since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24 - - if config.verbose: - self.log.debug( - "CheckModifications: Try to get listModifications from peers: %s, connected: %s, since: %s" % - (peers_try, peers_connected_num, since) - ) + self.log.debug("Try to get listModifications from peers: %s, connected: %s, since: %s" % (peers_try, peers_connected_num, since)) updaters = [] for i in range(3): @@ -470,13 +351,13 @@ class Site(object): gevent.joinall(updaters, timeout=10) # Wait 10 sec to workers done query modifications if not queried: # Start another 3 thread if first 3 is stuck - peers_try[0:0] = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Add connected peers + peers_try[0:0] = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Add really connected peers for _ in range(10): gevent.joinall(updaters, timeout=10) # Wait another 10 sec if none of updaters finished if queried: break - self.log.debug("CheckModifications: Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since)) + self.log.debug("Queried listModifications from: %s in %.3fs" % (queried, time.time() - s)) time.sleep(0.1) return queried @@ -486,20 +367,20 @@ class Site(object): def update(self, announce=False, check_files=False, since=None): self.content_manager.loadContent("content.json", 
load_includes=False) # Reload content.json self.content_updated = None # Reset content updated time - - if check_files: - self.storage.updateBadFiles(quick_check=True) # Quick check and mark bad files based on file size - - if not self.isServing(): - return False - self.updateWebsocket(updating=True) # Remove files that no longer in content.json - self.checkBadFiles() + for bad_file in self.bad_files.keys(): + if bad_file.endswith("content.json"): + continue + + file_info = self.content_manager.getFileInfo(bad_file) + if file_info is False or not file_info.get("size"): + del self.bad_files[bad_file] + self.log.debug("No info for file: %s, removing from bad_files" % bad_file) if announce: - self.announce(mode="update", force=True) + self.announce() # Full update, we can reset bad files if check_files and since == 0: @@ -507,6 +388,9 @@ class Site(object): queried = self.checkModifications(since) + if check_files: + self.storage.updateBadFiles(quick_check=True) # Quick check and mark bad files based on file size + changed, deleted = self.content_manager.loadContent("content.json", load_includes=False) if self.bad_files: @@ -516,8 +400,7 @@ class Site(object): if len(queried) == 0: # Failed to query modifications self.content_updated = False - else: - self.content_updated = time.time() + self.bad_files["content.json"] = 1 self.updateWebsocket(updated=True) @@ -525,7 +408,7 @@ class Site(object): def redownloadContents(self): # Download all content.json again content_threads = [] - for inner_path in list(self.content_manager.contents.keys()): + for inner_path in self.content_manager.contents.keys(): content_threads.append(self.needFile(inner_path, update=True, blocking=False)) self.log.debug("Waiting %s content.json to finish..." % len(content_threads)) @@ -537,12 +420,28 @@ class Site(object): content_json_modified = self.content_manager.contents[inner_path]["modified"] body = self.storage.read(inner_path) + # Find out my ip and port + tor_manager = self.connection_server.tor_manager + if tor_manager and tor_manager.enabled and tor_manager.start_onions: + my_ip = tor_manager.getOnion(self.address) + if my_ip: + my_ip += ".onion" + my_port = config.fileserver_port + else: + my_ip = config.ip_external + if self.connection_server.port_opened: + my_port = config.fileserver_port + else: + my_port = 0 + while 1: if not peers or len(published) >= limit: if event_done: event_done.set(True) break # All peers done, or published engouht - peer = peers.pop() + peer = peers.pop(0) + if peer in peers: # Remove duplicate + peers.remove(peer) if peer in published: continue if peer.last_content_json_update == content_json_modified: @@ -560,10 +459,16 @@ class Site(object): for retry in range(2): try: with gevent.Timeout(timeout, False): - result = peer.publish(self.address, inner_path, body, content_json_modified, diffs) + result = peer.request("update", { + "site": self.address, + "inner_path": inner_path, + "body": body, + "diffs": diffs, + "peer": (my_ip, my_port) + }) if result: break - except Exception as err: + except Exception, err: self.log.error("Publish error: %s" % Debug.formatException(err)) result = {"exception": Debug.formatException(err)} @@ -585,7 +490,7 @@ class Site(object): publishers = [] # Publisher threads if not self.peers: - self.announce(mode="more") + self.announce() if limit == "default": limit = 5 @@ -597,13 +502,11 @@ class Site(object): random.shuffle(peers) peers = sorted(peers, key=lambda peer: peer.connection.handshake.get("rev", 0) < config.rev - 100) # Prefer newer clients - 
if len(peers) < limit * 2 and len(self.peers) > len(peers): # Add more, non-connected peers if necessary + if len(peers) < limit * 2: # Add more, non-connected peers if necessary peers += self.getRecentPeers(limit * 2) - peers = set(peers) - self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." % ( - inner_path, limit, len(self.peers), num_connected_peers, list(diffs.keys()), float(len(str(diffs))) / 1024 + inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024 )) if not peers: @@ -622,7 +525,7 @@ class Site(object): # Publish more peers in the backgroup self.log.info( - "Published %s to %s peers, publishing to %s more peers in the background" % + "Successfuly %s published to %s peers, publishing to %s more peers in the background" % (inner_path, len(published), limit) ) @@ -635,7 +538,6 @@ class Site(object): return len(published) # Copy this site - @util.Noparallel() def clone(self, address, privatekey=None, address_index=None, root_inner_path="", overwrite=False): import shutil new_site = SiteManager.site_manager.need(address, all_file=False) @@ -648,16 +550,11 @@ class Site(object): # Copy root content.json if not new_site.storage.isFile("content.json") and not overwrite: - # New site: Content.json not exist yet, create a new one from source site - if "size_limit" in self.settings: - new_site.settings["size_limit"] = self.settings["size_limit"] - - # Use content.json-default is specified + # Content.json not exist yet, create a new one from source site if self.storage.isFile(root_inner_path + "/content.json-default"): content_json = self.storage.loadJson(root_inner_path + "/content.json-default") else: content_json = self.storage.loadJson("content.json") - if "domain" in content_json: del content_json["domain"] content_json["title"] = "my" + content_json["title"] @@ -672,14 +569,8 @@ class Site(object): ) # Copy files - for content_inner_path, content in list(self.content_manager.contents.items()): - file_relative_paths = list(content.get("files", {}).keys()) - - # Sign content.json at the end to make sure every file is included - file_relative_paths.sort() - file_relative_paths.sort(key=lambda key: key.replace("-default", "").endswith("content.json")) - - for file_relative_path in file_relative_paths: + for content_inner_path, content in self.content_manager.contents.items(): + for file_relative_path in sorted(content["files"].keys()): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to content.json file_inner_path = file_inner_path.strip("/") # Strip leading / if not file_inner_path.startswith(root_inner_path): @@ -702,15 +593,15 @@ class Site(object): dest_dir = os.path.dirname(file_path_dest) if not os.path.isdir(dest_dir): os.makedirs(dest_dir) - if file_inner_path_dest.replace("-default", "") == "content.json": # Don't copy root content.json-default + if file_inner_path_dest == "content.json-default": # Don't copy root content.json-default continue shutil.copy(file_path, file_path_dest) # If -default in path, create a -default less copy of the file - if "-default" in file_inner_path_dest: - file_path_dest = new_site.storage.getPath(file_inner_path_dest.replace("-default", "")) - if new_site.storage.isFile(file_inner_path_dest.replace("-default", "")) and not overwrite: + if "-default" in file_inner_path: + file_path_dest = new_site.storage.getPath(file_inner_path.replace("-default", "")) + if new_site.storage.isFile(file_inner_path.replace("-default", "")) and not 
overwrite: # Don't overwrite site files with default ones self.log.debug("[SKIP] Default file: %s (already exist)" % file_inner_path) continue @@ -721,19 +612,19 @@ class Site(object): shutil.copy(file_path, file_path_dest) # Sign if content json if file_path_dest.endswith("/content.json"): - new_site.storage.onUpdated(file_inner_path_dest.replace("-default", "")) + new_site.storage.onUpdated(file_inner_path.replace("-default", "")) new_site.content_manager.loadContent( - file_inner_path_dest.replace("-default", ""), add_bad_files=False, + file_inner_path.replace("-default", ""), add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign(file_inner_path_dest.replace("-default", ""), privatekey, remove_missing_optional=True) + new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey) new_site.content_manager.loadContent( - file_inner_path_dest, add_bad_files=False, delete_removed_files=False, load_includes=False + file_inner_path, add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign("content.json", privatekey, remove_missing_optional=True) + new_site.content_manager.sign("content.json", privatekey) new_site.content_manager.loadContent( "content.json", add_bad_files=False, delete_removed_files=False, load_includes=False ) @@ -741,10 +632,7 @@ class Site(object): # Rebuild DB if new_site.storage.isFile("dbschema.json"): new_site.storage.closeDb() - try: - new_site.storage.rebuildDb() - except Exception as err: - self.log.error(err) + new_site.storage.rebuildDb() return new_site @@ -752,60 +640,35 @@ class Site(object): def pooledNeedFile(self, *args, **kwargs): return self.needFile(*args, **kwargs) - def isFileDownloadAllowed(self, inner_path, file_info): - # Verify space for all site - if self.settings["size"] > self.getSizeLimit() * 1024 * 1024: - return False - # Verify space for file - if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024: - self.log.debug( - "File size %s too large: %sMB > %sMB, skipping..." % - (inner_path, file_info.get("size", 0) / 1024 / 1024, config.file_size_limit) - ) - return False - else: - return True - - def needFileInfo(self, inner_path): - file_info = self.content_manager.getFileInfo(inner_path) - if not file_info: - # No info for file, download all content.json first - self.log.debug("No info for %s, waiting for all content.json" % inner_path) - success = self.downloadContent("content.json", download_files=False) - if not success: - return False - file_info = self.content_manager.getFileInfo(inner_path) - return file_info - # Check and download if file not exist def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0): - if self.worker_manager.tasks.findTask(inner_path): - task = self.worker_manager.addTask(inner_path, peer, priority=priority) - if blocking: - return task["evt"].get() - else: - return task["evt"] - elif self.storage.isFile(inner_path) and not update: # File exist, no need to do anything + if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything return True - elif not self.isServing(): # Site not serving + elif self.settings["serving"] is False: # Site not serving return False else: # Wait until file downloaded + self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file if not self.content_manager.contents.get("content.json"): # No content.json, download it first! 
- self.log.debug("Need content.json first (inner_path: %s, priority: %s)" % (inner_path, priority)) - if priority > 0: - gevent.spawn(self.announce) + self.log.debug("Need content.json first") + gevent.spawn(self.announce) if inner_path != "content.json": # Prevent double download task = self.worker_manager.addTask("content.json", peer) - task["evt"].get() + task.get() self.content_manager.loadContent() if not self.content_manager.contents.get("content.json"): return False # Content.json download failed - file_info = None if not inner_path.endswith("content.json"): - file_info = self.needFileInfo(inner_path) + file_info = self.content_manager.getFileInfo(inner_path) if not file_info: - return False + # No info for file, download all content.json first + self.log.debug("No info for %s, waiting for all content.json" % inner_path) + success = self.downloadContent("content.json", download_files=False) + if not success: + return False + file_info = self.content_manager.getFileInfo(inner_path) + if not file_info: + return False # Still no info for file if "cert_signers" in file_info and not file_info["content_inner_path"] in self.content_manager.contents: self.log.debug("Missing content.json for requested user file: %s" % inner_path) if self.bad_files.get(file_info["content_inner_path"], 0) > 5: @@ -815,30 +678,22 @@ class Site(object): return False self.downloadContent(file_info["content_inner_path"]) - if not self.isFileDownloadAllowed(inner_path, file_info): - self.log.debug("%s: Download not allowed" % inner_path) - return False - - self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 # Mark as bad file - - task = self.worker_manager.addTask(inner_path, peer, priority=priority, file_info=file_info) + task = self.worker_manager.addTask(inner_path, peer, priority=priority) if blocking: - return task["evt"].get() + return task.get() else: - return task["evt"] + return task # Add or update a peer to site # return_peer: Always return the peer even if it was already present - def addPeer(self, ip, port, return_peer=False, connection=None, source="other"): - if not ip or ip == "0.0.0.0": + def addPeer(self, ip, port, return_peer=False, connection=None): + if not ip: return False - key = "%s:%s" % (ip, port) - peer = self.peers.get(key) - if peer: # Already has this ip - peer.found(source) + if key in self.peers: # Already has this ip + self.peers[key].found() if return_peer: # Always return peer - return peer + return self.peers[key] else: return False else: # New peer @@ -846,20 +701,189 @@ class Site(object): return False # Ignore blacklist (eg. 
myself) peer = Peer(ip, port, self) self.peers[key] = peer - peer.found(source) return peer - def announce(self, *args, **kwargs): - if self.isServing(): - self.announcer.announce(*args, **kwargs) + # Gather peer from connected peers + @util.Noparallel(blocking=False) + def announcePex(self, query_num=2, need_num=5): + peers = [peer for peer in self.peers.values() if peer.connection and peer.connection.connected] # Connected peers + if len(peers) == 0: # Small number of connected peers for this site, connect to any + self.log.debug("Small number of peers detected...query all of peers using pex") + peers = self.peers.values() + need_num = 10 + + random.shuffle(peers) + done = 0 + added = 0 + for peer in peers: + res = peer.pex(need_num=need_num) + if type(res) == int: # We have result + done += 1 + added += res + if res: + self.worker_manager.onPeers() + self.updateWebsocket(peers_added=res) + if done == query_num: + break + self.log.debug("Queried pex from %s peers got %s new peers." % (done, added)) + + # Gather peers from tracker + # Return: Complete time or False on error + def announceTracker(self, tracker_protocol, tracker_address, fileserver_port=0, add_types=[], my_peer_id="", mode="start"): + s = time.time() + if "ip4" not in add_types: + fileserver_port = 0 + + if tracker_protocol == "udp": # Udp tracker + if config.disable_udp: + return False # No udp supported + ip, port = tracker_address.split(":") + tracker = UdpTrackerClient(ip, int(port)) + tracker.peer_port = fileserver_port + try: + tracker.connect() + tracker.poll_once() + tracker.announce(info_hash=hashlib.sha1(self.address).hexdigest(), num_want=50) + back = tracker.poll_once() + peers = back["response"]["peers"] + except Exception, err: + return False + + elif tracker_protocol == "http": # Http tracker + params = { + 'info_hash': hashlib.sha1(self.address).digest(), + 'peer_id': my_peer_id, 'port': fileserver_port, + 'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30, + 'event': 'started' + } + req = None + try: + url = "http://" + tracker_address + "?" 
+ urllib.urlencode(params) + # Load url + with gevent.Timeout(30, False): # Make sure of timeout + req = urllib2.urlopen(url, timeout=25) + response = req.read() + req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions + req.close() + req = None + if not response: + self.log.debug("Http tracker %s response error" % url) + return False + # Decode peers + peer_data = bencode.decode(response)["peers"] + response = None + peer_count = len(peer_data) / 6 + peers = [] + for peer_offset in xrange(peer_count): + off = 6 * peer_offset + peer = peer_data[off:off + 6] + addr, port = struct.unpack('!LH', peer) + peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port}) + except Exception, err: + self.log.debug("Http tracker %s error: %s" % (url, err)) + if req: + req.close() + req = None + return False + else: + peers = [] + + # Adding peers + added = 0 + for peer in peers: + if not peer["port"]: + continue # Dont add peers with port 0 + if self.addPeer(peer["addr"], peer["port"]): + added += 1 + if added: + self.worker_manager.onPeers() + self.updateWebsocket(peers_added=added) + self.log.debug("Found %s peers, new: %s, total: %s" % (len(peers), added, len(self.peers))) + return time.time() - s + + # Add myself and get other peers from tracker + def announce(self, force=False, mode="start", pex=True): + if time.time() < self.time_announce + 30 and not force: + return # No reannouncing within 30 secs + self.time_announce = time.time() + + trackers = config.trackers + # Filter trackers based on supported networks + if config.disable_udp: + trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")] + if self.connection_server and self.connection_server.tor_manager and not self.connection_server.tor_manager.enabled: + trackers = [tracker for tracker in trackers if ".onion" not in tracker] + + if trackers and (mode == "update" or mode == "more"): # Only announce on one tracker, increment the queried tracker id + self.last_tracker_id += 1 + self.last_tracker_id = self.last_tracker_id % len(trackers) + trackers = [trackers[self.last_tracker_id]] # We only going to use this one + + errors = [] + slow = [] + add_types = [] + if self.connection_server: + my_peer_id = self.connection_server.peer_id + + # Type of addresses they can reach me + if self.connection_server.port_opened: + add_types.append("ip4") + if self.connection_server.tor_manager and self.connection_server.tor_manager.start_onions: + add_types.append("onion") + else: + my_peer_id = "" + + s = time.time() + announced = 0 + threads = [] + fileserver_port = config.fileserver_port + + for tracker in trackers: # Start announce threads + tracker_protocol, tracker_address = tracker.split("://") + thread = gevent.spawn( + self.announceTracker, tracker_protocol, tracker_address, fileserver_port, add_types, my_peer_id, mode + ) + threads.append(thread) + thread.tracker_address = tracker_address + thread.tracker_protocol = tracker_protocol + + gevent.joinall(threads, timeout=10) # Wait for announce finish + + for thread in threads: + if thread.value: + if thread.value > 1: + slow.append("%.2fs %s://%s" % (thread.value, thread.tracker_protocol, thread.tracker_address)) + announced += 1 + else: + if thread.ready(): + errors.append("%s://%s" % (thread.tracker_protocol, thread.tracker_address)) + else: # Still running + slow.append("10s+ %s://%s" % (thread.tracker_protocol, thread.tracker_address)) + + # Save peers num + self.settings["peers"] = len(self.peers) + + if len(errors) < 
len(threads): # Less errors than total tracker nums + self.log.debug( + "Announced types %s in mode %s to %s trackers in %.3fs, errors: %s, slow: %s" % + (add_types, mode, announced, time.time() - s, errors, slow) + ) + else: + if mode != "update": + self.log.error("Announce to %s trackers in %.3fs, failed" % (announced, time.time() - s)) + + if pex: + if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]: + # If no connected peer yet then wait for connections + gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later + else: # Else announce immediately + if mode == "more": # Need more peers + self.announcePex(need_num=10) + else: + self.announcePex() # Keep connections to get the updates - def needConnections(self, num=None, check_site_on_reconnect=False): - if num is None: - if len(self.peers) < 50: - num = 3 - else: - num = 6 + def needConnections(self, num=4, check_site_on_reconnect=False): need = min(len(self.peers), num, config.connected_limit) # Need 5 peer, but max total peers connected = len(self.getConnectedPeers()) @@ -869,111 +893,64 @@ class Site(object): self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers))) if connected < need: # Need more than we have - for peer in self.getRecentPeers(30): + for peer in self.peers.values(): if not peer.connection or not peer.connection.connected: # No peer connection or disconnected peer.pex() # Initiate peer exchange if peer.connection and peer.connection.connected: connected += 1 # Successfully connected if connected >= need: break - self.log.debug( - "Connected before: %s, after: %s. Check site: %s." % - (connected_before, connected, check_site_on_reconnect) - ) if check_site_on_reconnect and connected_before == 0 and connected > 0 and self.connection_server.has_internet: + self.log.debug("Connected before: %s, after: %s. We need to check the site." 
% (connected_before, connected)) gevent.spawn(self.update, check_files=False) return connected # Return: Probably peers verified to be connectable recently - def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True): - peers = list(self.peers.values()) + def getConnectablePeers(self, need_num=5, ignore=[]): + peers = self.peers.values() found = [] for peer in peers: if peer.key.endswith(":0"): continue # Not connectable if not peer.connection: continue # No connection - if peer.ip.endswith(".onion") and not self.connection_server.tor_manager.enabled: - continue # Onion not supported if peer.key in ignore: continue # The requester has this peer if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago peer.connection = None # Cleanup: Dead connection continue - if not allow_private and helper.isPrivateIp(peer.ip): - continue found.append(peer) if len(found) >= need_num: break # Found requested number of peers - if len(found) < need_num: # Return not that good peers - found += [ - peer for peer in peers - if not peer.key.endswith(":0") and - peer.key not in ignore and - (allow_private or not helper.isPrivateIp(peer.ip)) - ][0:need_num - len(found)] + if need_num > 5 and need_num < 100 and len(found) < need_num: # Return not that good peers + found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num - len(found)] return found # Return: Recently found peers def getRecentPeers(self, need_num): - found = list(set(self.peers_recent)) - self.log.debug( - "Recent peers %s of %s (need: %s)" % - (len(found), len(self.peers), need_num) - ) - - if len(found) >= need_num or len(found) >= len(self.peers): - return sorted( - found, - key=lambda peer: peer.reputation, - reverse=True - )[0:need_num] - - # Add random peers - need_more = need_num - len(found) - if not self.connection_server.tor_manager.enabled: - peers = [peer for peer in self.peers.values() if not peer.ip.endswith(".onion")] - else: - peers = list(self.peers.values()) - - found_more = sorted( - peers[0:need_more * 50], - key=lambda peer: peer.reputation, - reverse=True - )[0:need_more * 2] - - found += found_more - + found = sorted(self.peers.values()[0:need_num*50], key=lambda peer: peer.time_found + peer.reputation * 60, reverse=True)[0:need_num*2] + random.shuffle(found) return found[0:need_num] def getConnectedPeers(self): back = [] - if not self.connection_server: - return [] - - tor_manager = self.connection_server.tor_manager for connection in self.connection_server.connections: if not connection.connected and time.time() - connection.start_time > 20: # Still not connected after 20s continue peer = self.peers.get("%s:%s" % (connection.ip, connection.port)) if peer: - if connection.ip.endswith(".onion") and connection.target_onion and tor_manager.start_onions: - # Check if the connection is made with the onion address created for the site - valid_target_onions = (tor_manager.getOnion(self.address), tor_manager.getOnion("global")) - if connection.target_onion not in valid_target_onions: - continue if not peer.connection: peer.connect(connection) back.append(peer) return back # Cleanup probably dead peers and close connection if too much - def cleanupPeers(self, peers_protected=[]): - peers = list(self.peers.values()) + def cleanupPeers(self): + peers = self.peers.values() if len(peers) > 20: # Cleanup old peers removed = 0 @@ -1002,12 +979,9 @@ class Site(object): need_to_close = len(connected_peers) - config.connected_limit if closed < 
need_to_close: - # Try to keep connections with more sites - for peer in sorted(connected_peers, key=lambda peer: min(peer.connection.sites, 5)): + for peer in sorted(connected_peers, key=lambda peer: min(peer.connection.sites, 5)): # Try to keep connections with more sites if not peer.connection: continue - if peer.key in peers_protected: - continue if peer.connection.sites > 5: break peer.connection.close("Cleanup peers") @@ -1032,17 +1006,15 @@ class Site(object): if sent >= limit: break if sent: - my_hashfield_changed = self.content_manager.hashfield.time_changed - self.log.debug("Sent my hashfield (changed %.3fs ago) to %s peers" % (time.time() - my_hashfield_changed, sent)) + self.log.debug("Sent my hashfield to %s peers" % sent) return sent # Update hashfield def updateHashfield(self, limit=5): # Return if no optional files - if not self.content_manager.hashfield and not self.content_manager.has_optional_files: + if not self.content_manager.hashfield and not self.content_manager.contents.get("content.json", {}).get("files_optional"): return False - s = time.time() queried = 0 connected_peers = self.getConnectedPeers() for peer in connected_peers: @@ -1053,7 +1025,7 @@ class Site(object): if queried >= limit: break if queried: - self.log.debug("Queried hashfield from %s peers in %.3fs" % (queried, time.time() - s)) + self.log.debug("Queried hashfield from %s peers" % queried) return queried # Returns if the optional file needs to be downloaded or not @@ -1061,22 +1033,14 @@ class Site(object): return self.settings.get("autodownloadoptional") def delete(self): - self.log.info("Deleting site...") - s = time.time() self.settings["serving"] = False - self.settings["deleting"] = True self.saveSettings() - num_greenlets = self.greenlet_manager.stopGreenlets("Site %s deleted" % self.address) self.worker_manager.running = False - num_workers = self.worker_manager.stopWorkers() - SiteManager.site_manager.delete(self.address) - self.content_manager.contents.db.deleteSite(self) - self.updateWebsocket(deleted=True) + self.worker_manager.stopWorkers() self.storage.deleteFiles() - self.log.info( - "Deleted site in %.3fs (greenlets: %s, workers: %s)" % - (time.time() - s, num_greenlets, num_workers) - ) + self.updateWebsocket() + self.content_manager.contents.db.deleteSite(self) + SiteManager.site_manager.delete(self.address) # - Events - @@ -1094,19 +1058,12 @@ class Site(object): # Send site status update to websocket clients def updateWebsocket(self, **kwargs): if kwargs: - param = {"event": list(kwargs.items())[0]} + param = {"event": kwargs.items()[0]} else: param = None for ws in self.websockets: ws.event("siteChanged", self, param) - def messageWebsocket(self, message, type="info", progress=None): - for ws in self.websockets: - if progress is None: - ws.cmd("notification", [type, message]) - else: - ws.cmd("progress", [type, message, progress]) - # File download started @util.Noparallel(blocking=False) def fileStarted(self): @@ -1123,8 +1080,6 @@ class Site(object): # Update content.json last download time if inner_path == "content.json": - if not self.settings.get("downloaded"): - self.settings["downloaded"] = int(time.time()) self.content_updated = time.time() self.updateWebsocket(file_done=inner_path) @@ -1140,8 +1095,5 @@ class Site(object): self.updateWebsocket(file_failed=inner_path) if self.bad_files.get(inner_path, 0) > 30: - self.fileForgot(inner_path) - - def fileForgot(self, inner_path): - self.log.debug("Giving up on %s" % inner_path) - del self.bad_files[inner_path] # Give up
after 30 tries + self.log.debug("Giving up on %s" % inner_path) + del self.bad_files[inner_path] # Give up after 30 tries diff --git a/src/Site/SiteAnnouncer.py b/src/Site/SiteAnnouncer.py deleted file mode 100644 index 2fd63e82..00000000 --- a/src/Site/SiteAnnouncer.py +++ /dev/null @@ -1,293 +0,0 @@ -import random -import time -import hashlib -import re -import collections - -import gevent - -from Plugin import PluginManager -from Config import config -from Debug import Debug -from util import helper -from greenlet import GreenletExit -import util - - -class AnnounceError(Exception): - pass - -global_stats = collections.defaultdict(lambda: collections.defaultdict(int)) - - -@PluginManager.acceptPlugins -class SiteAnnouncer(object): - def __init__(self, site): - self.site = site - self.stats = {} - self.fileserver_port = config.fileserver_port - self.peer_id = self.site.connection_server.peer_id - self.last_tracker_id = random.randint(0, 10) - self.time_last_announce = 0 - - def getTrackers(self): - return config.trackers - - def getSupportedTrackers(self): - trackers = self.getTrackers() - - if not self.site.connection_server.tor_manager.enabled: - trackers = [tracker for tracker in trackers if ".onion" not in tracker] - - trackers = [tracker for tracker in trackers if self.getAddressParts(tracker)] # Remove trackers with unknown address - - if "ipv6" not in self.site.connection_server.supported_ip_types: - trackers = [tracker for tracker in trackers if helper.getIpType(self.getAddressParts(tracker)["ip"]) != "ipv6"] - - return trackers - - def getAnnouncingTrackers(self, mode): - trackers = self.getSupportedTrackers() - - if trackers and (mode == "update" or mode == "more"): # Only announce on one tracker, increment the queried tracker id - self.last_tracker_id += 1 - self.last_tracker_id = self.last_tracker_id % len(trackers) - trackers_announcing = [trackers[self.last_tracker_id]] # We only going to use this one - else: - trackers_announcing = trackers - - return trackers_announcing - - def getOpenedServiceTypes(self): - back = [] - # Type of addresses they can reach me - if config.trackers_proxy == "disable" and config.tor != "always": - for ip_type, opened in list(self.site.connection_server.port_opened.items()): - if opened: - back.append(ip_type) - if self.site.connection_server.tor_manager.start_onions: - back.append("onion") - return back - - @util.Noparallel(blocking=False) - def announce(self, force=False, mode="start", pex=True): - if time.time() - self.time_last_announce < 30 and not force: - return # No reannouncing within 30 secs - if force: - self.site.log.debug("Force reannounce in mode %s" % mode) - - self.fileserver_port = config.fileserver_port - self.time_last_announce = time.time() - - trackers = self.getAnnouncingTrackers(mode) - - if config.verbose: - self.site.log.debug("Tracker announcing, trackers: %s" % trackers) - - errors = [] - slow = [] - s = time.time() - threads = [] - num_announced = 0 - - for tracker in trackers: # Start announce threads - tracker_stats = global_stats[tracker] - # Reduce the announce time for trackers that looks unreliable - time_announce_allowed = time.time() - 60 * min(30, tracker_stats["num_error"]) - if tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time_announce_allowed and not force: - if config.verbose: - self.site.log.debug("Tracker %s looks unreliable, announce skipped (error: %s)" % (tracker, tracker_stats["num_error"])) - continue - thread = self.site.greenlet_manager.spawn(self.announceTracker, tracker, 
mode=mode) - threads.append(thread) - thread.tracker = tracker - - time.sleep(0.01) - self.updateWebsocket(trackers="announcing") - - gevent.joinall(threads, timeout=20) # Wait for announce finish - - for thread in threads: - if thread.value is None: - continue - if thread.value is not False: - if thread.value > 1.0: # Takes more than 1 second to announce - slow.append("%.2fs %s" % (thread.value, thread.tracker)) - num_announced += 1 - else: - if thread.ready(): - errors.append(thread.tracker) - else: # Still running - slow.append("30s+ %s" % thread.tracker) - - # Save peers num - self.site.settings["peers"] = len(self.site.peers) - - if len(errors) < len(threads): # At least one tracker finished - if len(trackers) == 1: - announced_to = trackers[0] - else: - announced_to = "%s/%s trackers" % (num_announced, len(threads)) - if mode != "update" or config.verbose: - self.site.log.debug( - "Announced in mode %s to %s in %.3fs, errors: %s, slow: %s" % - (mode, announced_to, time.time() - s, errors, slow) - ) - else: - if len(threads) > 1: - self.site.log.error("Announce to %s trackers in %.3fs, failed" % (len(threads), time.time() - s)) - if len(threads) == 1 and mode != "start": # Move to next tracker - self.site.log.debug("Tracker failed, skipping to next one...") - self.site.greenlet_manager.spawnLater(1.0, self.announce, force=force, mode=mode, pex=pex) - - self.updateWebsocket(trackers="announced") - - if pex: - self.updateWebsocket(pex="announcing") - if mode == "more": # Need more peers - self.announcePex(need_num=10) - else: - self.announcePex() - - self.updateWebsocket(pex="announced") - - def getTrackerHandler(self, protocol): - return None - - def getAddressParts(self, tracker): - if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker): - return None - protocol, address = tracker.split("://", 1) - if ":" in address: - ip, port = address.rsplit(":", 1) - else: - ip = address - if protocol.startswith("https"): - port = 443 - else: - port = 80 - back = {} - back["protocol"] = protocol - back["address"] = address - back["ip"] = ip - back["port"] = port - return back - - def announceTracker(self, tracker, mode="start", num_want=10): - s = time.time() - address_parts = self.getAddressParts(tracker) - if not address_parts: - self.site.log.warning("Tracker %s error: Invalid address" % tracker) - return False - - if tracker not in self.stats: - self.stats[tracker] = {"status": "", "num_request": 0, "num_success": 0, "num_error": 0, "time_request": 0, "time_last_error": 0} - - last_status = self.stats[tracker]["status"] - self.stats[tracker]["status"] = "announcing" - self.stats[tracker]["time_request"] = time.time() - global_stats[tracker]["time_request"] = time.time() - if config.verbose: - self.site.log.debug("Tracker announcing to %s (mode: %s)" % (tracker, mode)) - if mode == "update": - num_want = 10 - else: - num_want = 30 - - handler = self.getTrackerHandler(address_parts["protocol"]) - error = None - try: - if handler: - peers = handler(address_parts["address"], mode=mode, num_want=num_want) - else: - raise AnnounceError("Unknown protocol: %s" % address_parts["protocol"]) - except Exception as err: - self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, Debug.formatException(err), mode)) - error = err - - if error: - self.stats[tracker]["status"] = "error" - self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["last_error"] = str(error) - self.stats[tracker]["time_last_error"] = time.time() - if 
self.site.connection_server.has_internet: - self.stats[tracker]["num_error"] += 1 - self.stats[tracker]["num_request"] += 1 - global_stats[tracker]["num_request"] += 1 - if self.site.connection_server.has_internet: - global_stats[tracker]["num_error"] += 1 - self.updateWebsocket(tracker="error") - return False - - if peers is None: # Announce skipped - self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["status"] = last_status - return None - - self.stats[tracker]["status"] = "announced" - self.stats[tracker]["time_status"] = time.time() - self.stats[tracker]["num_success"] += 1 - self.stats[tracker]["num_request"] += 1 - global_stats[tracker]["num_request"] += 1 - global_stats[tracker]["num_error"] = 0 - - if peers is True: # Announce success, but no peers returned - return time.time() - s - - # Adding peers - added = 0 - for peer in peers: - if peer["port"] == 1: # Some trackers does not accept port 0, so we send port 1 as not-connectable - peer["port"] = 0 - if not peer["port"]: - continue # Dont add peers with port 0 - if self.site.addPeer(peer["addr"], peer["port"], source="tracker"): - added += 1 - - if added: - self.site.worker_manager.onPeers() - self.site.updateWebsocket(peers_added=added) - - if config.verbose: - self.site.log.debug( - "Tracker result: %s://%s (found %s peers, new: %s, total: %s)" % - (address_parts["protocol"], address_parts["address"], len(peers), added, len(self.site.peers)) - ) - return time.time() - s - - @util.Noparallel(blocking=False) - def announcePex(self, query_num=2, need_num=5): - peers = self.site.getConnectedPeers() - if len(peers) == 0: # Wait 3s for connections - time.sleep(3) - peers = self.site.getConnectedPeers() - - if len(peers) == 0: # Small number of connected peers for this site, connect to any - peers = list(self.site.getRecentPeers(20)) - need_num = 10 - - random.shuffle(peers) - done = 0 - total_added = 0 - for peer in peers: - num_added = peer.pex(need_num=need_num) - if num_added is not False: - done += 1 - total_added += num_added - if num_added: - self.site.worker_manager.onPeers() - self.site.updateWebsocket(peers_added=num_added) - else: - time.sleep(0.1) - if done == query_num: - break - self.site.log.debug("Pex result: from %s peers got %s new peers." % (done, total_added)) - - def updateWebsocket(self, **kwargs): - if kwargs: - param = {"event": list(kwargs.items())[0]} - else: - param = None - - for ws in self.site.websockets: - ws.event("announcerChanged", self.site, param) diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index 684d69fc..f3d63fda 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -7,13 +7,10 @@ import atexit import gevent -import util from Plugin import PluginManager from Content import ContentDb from Config import config from util import helper -from util import RateLimit -from util import Cached @PluginManager.acceptPlugins @@ -21,87 +18,59 @@ class SiteManager(object): def __init__(self): self.log = logging.getLogger("SiteManager") self.log.debug("SiteManager created.") - self.sites = {} - self.sites_changed = int(time.time()) + self.sites = None self.loaded = False gevent.spawn(self.saveTimer) - atexit.register(lambda: self.save(recalculate_size=True)) + atexit.register(self.save) # Load all sites from data/sites.json - @util.Noparallel() - def load(self, cleanup=True, startup=False): - from Debug import Debug - self.log.info("Loading sites... 
(cleanup: %s, startup: %s)" % (cleanup, startup)) + def load(self, cleanup=True): + self.log.debug("Loading sites...") self.loaded = False - from .Site import Site + from Site import Site + if self.sites is None: + self.sites = {} address_found = [] added = 0 - load_s = time.time() # Load new addresses - try: - json_path = "%s/sites.json" % config.data_dir - data = json.load(open(json_path)) - except Exception as err: - raise Exception("Unable to load %s: %s" % (json_path, err)) - - sites_need = [] - - for address, settings in data.items(): - if address not in self.sites: - if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): - # Root content.json exists, try to load site - s = time.time() - try: - site = Site(address, settings=settings) - site.content_manager.contents.get("content.json") - except Exception as err: - self.log.debug("Error loading site %s: %s" % (address, err)) - continue - self.sites[address] = site - self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s)) - added += 1 - elif startup: - # No site directory, start download - self.log.debug("Found new site in sites.json: %s" % address) - sites_need.append([address, settings]) - added += 1 - + for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems(): + if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): + s = time.time() + try: + site = Site(address, settings=settings) + site.content_manager.contents.get("content.json") + except Exception, err: + self.log.debug("Error loading site %s: %s" % (address, err)) + continue + self.sites[address] = site + self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s)) + added += 1 address_found.append(address) # Remove deleted addresses if cleanup: - for address in list(self.sites.keys()): + for address in self.sites.keys(): if address not in address_found: del(self.sites[address]) self.log.debug("Removed site: %s" % address) # Remove orphan sites from contentdb content_db = ContentDb.getContentDb() - for row in content_db.execute("SELECT * FROM site").fetchall(): + for row in content_db.execute("SELECT * FROM site"): address = row["address"] - if address not in self.sites and address not in address_found: + if address not in self.sites: self.log.info("Deleting orphan site from content.db: %s" % address) - - try: - content_db.execute("DELETE FROM site WHERE ?", {"address": address}) - except Exception as err: - self.log.error("Can't delete site %s from content_db: %s" % (address, err)) - + content_db.execute("DELETE FROM site WHERE ?", {"address": address}) if address in content_db.site_ids: del content_db.site_ids[address] if address in content_db.sites: del content_db.sites[address] - self.loaded = True - for address, settings in sites_need: - gevent.spawn(self.need, address, settings=settings) if added: - self.log.info("Added %s sites in %.3fs" % (added, time.time() - load_s)) + self.log.debug("SiteManager added %s sites" % added) + self.loaded = True - def saveDelayed(self): - RateLimit.callAsync("Save sites.json", allowed_again=5, func=self.save) - - def save(self, recalculate_size=False): + def save(self): if not self.sites: self.log.debug("Save skipped: No sites found") return @@ -111,116 +80,79 @@ class SiteManager(object): s = time.time() data = {} # Generate data file - s = time.time() - for address, site in list(self.list().items()): - if recalculate_size: - site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site
size + for address, site in self.list().iteritems(): + site.settings["size"] = site.content_manager.getTotalSize() # Update site size data[address] = site.settings - data[address]["cache"] = site.getSettingsCache() - time_generate = time.time() - s + data[address]["cache"] = {} + data[address]["cache"]["bad_files"] = site.bad_files + data[address]["cache"]["hashfield"] = site.content_manager.hashfield.tostring().encode("base64") - s = time.time() if data: - helper.atomicWrite("%s/sites.json" % config.data_dir, helper.jsonDumps(data).encode("utf8")) + helper.atomicWrite("%s/sites.json" % config.data_dir, json.dumps(data, indent=2, sort_keys=True)) else: self.log.debug("Save error: No data") - time_write = time.time() - s - # Remove cache from site settings - for address, site in self.list().items(): + for address, site in self.list().iteritems(): site.settings["cache"] = {} - self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write)) + self.log.debug("Saved sites in %.2fs" % (time.time() - s)) def saveTimer(self): while 1: time.sleep(60 * 10) - self.save(recalculate_size=True) + self.save() # Checks if its a valid address def isAddress(self, address): return re.match("^[A-Za-z0-9]{26,35}$", address) - def isDomain(self, address): - return False - - @Cached(timeout=10) - def isDomainCached(self, address): - return self.isDomain(address) - - def resolveDomain(self, domain): - return False - - @Cached(timeout=10) - def resolveDomainCached(self, domain): - return self.resolveDomain(domain) - # Return: Site object or None if not found def get(self, address): - if self.isDomainCached(address): - address_resolved = self.resolveDomainCached(address) - if address_resolved: - address = address_resolved - - if not self.loaded: # Not loaded yet - self.log.debug("Loading site: %s)..." % address) + if self.sites is None: # Not loaded yet + self.log.debug("Getting new site: %s)..." 
% address) self.load() - site = self.sites.get(address) - - return site - - def add(self, address, all_file=True, settings=None, **kwargs): - from .Site import Site - self.sites_changed = int(time.time()) - # Try to find site with different case - for recover_address, recover_site in list(self.sites.items()): - if recover_address.lower() == address.lower(): - return recover_site - - if not self.isAddress(address): - return False # Not address: %s % address - self.log.debug("Added new site: %s" % address) - config.loadTrackersFile() - site = Site(address, settings=settings) - self.sites[address] = site - if not site.settings["serving"]: # Maybe it was deleted before - site.settings["serving"] = True - site.saveSettings() - if all_file: # Also download user files on first sync - site.download(check_size=True, blind_includes=True) - return site + return self.sites.get(address) # Return or create site and start downloading site files - def need(self, address, *args, **kwargs): - if self.isDomainCached(address): - address_resolved = self.resolveDomainCached(address) - if address_resolved: - address = address_resolved - + def need(self, address, all_file=True): + from Site import Site site = self.get(address) if not site: # Site does not exist yet - site = self.add(address, *args, **kwargs) + # Try to find site with different case + for recover_address, recover_site in self.sites.items(): + if recover_address.lower() == address.lower(): + return recover_site + + if not self.isAddress(address): + return False # Not address: %s % address + self.log.debug("Added new site: %s" % address) + site = Site(address) + self.sites[address] = site + if not site.settings["serving"]: # Maybe it was deleted before + site.settings["serving"] = True + site.saveSettings() + if all_file: # Also download user files on first sync + site.download(check_size=True, blind_includes=True) + else: + if all_file: + site.download(check_size=True, blind_includes=True) + return site def delete(self, address): - self.sites_changed = int(time.time()) - self.log.debug("Deleted site: %s" % address) + self.log.debug("SiteManager deleted site: %s" % address) del(self.sites[address]) # Delete from sites.json self.save() # Lazy load sites def list(self): - if not self.loaded: # Not loaded yet + if self.sites is None: # Not loaded yet self.log.debug("Sites not loaded yet...") - self.load(startup=True) + self.load() return self.sites site_manager = SiteManager() # Singleton -if config.action == "main": # Don't connect / add myself to peerlist - peer_blacklist = [("127.0.0.1", config.fileserver_port), ("::1", config.fileserver_port)] -else: - peer_blacklist = [] - +peer_blacklist = [("127.0.0.1", config.fileserver_port)] # Don't add these peers diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py index 27032e79..fd9d27e5 100644 --- a/src/Site/SiteStorage.py +++ b/src/Site/SiteStorage.py @@ -3,33 +3,23 @@ import re import shutil import json import time -import errno -from collections import defaultdict +import sys import sqlite3 import gevent.event -import util -from util import SafeRe -from Db.Db import Db +from Db import Db from Debug import Debug from Config import config from util import helper -from util import ThreadPool from Plugin import PluginManager -from Translate import translate as _ - - -thread_pool_fs_read = ThreadPool.ThreadPool(config.threads_fs_read, name="FS read") -thread_pool_fs_write = ThreadPool.ThreadPool(config.threads_fs_write, name="FS write") -thread_pool_fs_batch = ThreadPool.ThreadPool(1, name="FS batch")
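The three thread_pool_fs_* globals removed above back the @thread_pool_fs_read.wrap and @thread_pool_fs_write.wrap decorators that the newer SiteStorage uses to keep blocking filesystem calls off the gevent event loop. Below is a minimal sketch of that wrap pattern, assuming only gevent is installed; the ThreadPool class and read_file function are illustrative stand-ins, not ZeroNet's actual util.ThreadPool, which also handles pool naming and greenlet bookkeeping:

import functools

import gevent.threadpool


class ThreadPool(object):
    # Illustrative stand-in for ZeroNet's util.ThreadPool (assumption, not the project's API)
    def __init__(self, max_size, name=None):
        self.name = name
        self.pool = gevent.threadpool.ThreadPool(max_size)

    def wrap(self, func):
        # Decorator: run the blocking function in a worker thread so the
        # gevent hub keeps scheduling other greenlets while it waits on I/O
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return self.pool.apply(func, args, kwargs)
        return wrapper


thread_pool_fs_read = ThreadPool(4, name="FS read")


@thread_pool_fs_read.wrap
def read_file(path):  # hypothetical example standing in for SiteStorage.read
    with open(path, "rb") as f:
        return f.read()

With this in place a greenlet calling read_file() blocks only itself; the hub continues serving other greenlets while the OS read runs in a worker thread, which is why dropping the pools (as this diff does) makes large reads and the batch rebuild below stall the whole process.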
@PluginManager.acceptPlugins class SiteStorage(object): def __init__(self, site, allow_create=True): self.site = site - self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data directory + self.directory = u"%s/%s" % (config.data_dir, self.site.address) # Site data directory self.allowed_dir = os.path.abspath(self.directory) # Only serve files within this dir self.log = site.log self.db = None # Db class @@ -43,233 +33,140 @@ class SiteStorage(object): else: raise Exception("Directory not exists: %s" % self.directory) - def getDbFile(self): - if self.db: - return self.db.schema["db_file"] - else: - if self.isFile("dbschema.json"): - schema = self.loadJson("dbschema.json") - return schema["db_file"] - else: - return False + # Load db from dbschema.json + def openDb(self, check=True): + try: + schema = self.loadJson("dbschema.json") + db_path = self.getPath(schema["db_file"]) + except Exception, err: + raise Exception("dbschema.json is not a valid JSON: %s" % err) - # Create new database object with the site's schema - def openDb(self, close_idle=False): - schema = self.getDbSchema() - db_path = self.getPath(schema["db_file"]) - return Db(schema, db_path, close_idle=close_idle) + if check: + if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Does not exist or null + self.rebuildDb() - def closeDb(self, reason="Unknown (SiteStorage)"): + if not self.db: + self.db = Db(schema, db_path) + + if check and not self.db_checked: + changed_tables = self.db.checkTables() + if changed_tables: + self.rebuildDb(delete_db=False) # TODO: only update the changed table data + + def closeDb(self): if self.db: - self.db.close(reason) + self.db.close() self.event_db_busy = None self.db = None - def getDbSchema(self): - try: - self.site.needFile("dbschema.json") - schema = self.loadJson("dbschema.json") - except Exception as err: - raise Exception("dbschema.json is not a valid JSON: %s" % err) - return schema - - def loadDb(self): - self.log.debug("No database, waiting for dbschema.json...") - self.site.needFile("dbschema.json", priority=3) - self.log.debug("Got dbschema.json") - self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exists - if self.has_db: - schema = self.getDbSchema() - db_path = self.getPath(schema["db_file"]) - if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: - try: - self.rebuildDb(reason="Missing database") - except Exception as err: - self.log.error(err) - pass - - if self.db: - self.db.close("Getting new db for SiteStorage") - self.db = self.openDb(close_idle=True) - try: - changed_tables = self.db.checkTables() - if changed_tables: - self.rebuildDb(delete_db=False, reason="Changed tables") # TODO: only update the changed table data - except sqlite3.OperationalError: - pass - # Return db class - @util.Noparallel() def getDb(self): - if self.event_db_busy: # Db not ready for queries - self.log.debug("Waiting for db...") - self.event_db_busy.get() # Wait for event if not self.db: - self.loadDb() + self.log.debug("No database, waiting for dbschema.json...") + self.site.needFile("dbschema.json", priority=3) + self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exists + if self.has_db: + self.openDb() return self.db def updateDbFile(self, inner_path, file=None, cur=None): path = self.getPath(inner_path) - if cur: - db = cur.db - else: - db = self.getDb() - return db.updateJson(path, file, cur) + return self.getDb().updateJson(path, file, cur) # Return possible db files for the site - @thread_pool_fs_read.wrap def
getDbFiles(self): - found = 0 - for content_inner_path, content in self.site.content_manager.contents.items(): + for content_inner_path, content in self.site.content_manager.contents.iteritems(): # content.json file itself if self.isFile(content_inner_path): - yield content_inner_path, self.getPath(content_inner_path) + yield content_inner_path, self.open(content_inner_path) else: - self.log.debug("[MISSING] %s" % content_inner_path) + self.log.error("[MISSING] %s" % content_inner_path) # Data files in content.json content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in list(content.get("files", {}).keys()) + list(content.get("files_optional", {}).keys()): - if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"): + for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + if not file_relative_path.endswith(".json"): continue # We only interesed in json files file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / if self.isFile(file_inner_path): - yield file_inner_path, self.getPath(file_inner_path) + yield file_inner_path, self.open(file_inner_path) else: - self.log.debug("[MISSING] %s" % file_inner_path) - found += 1 - if found % 100 == 0: - time.sleep(0.001) # Context switch to avoid UI block + self.log.error("[MISSING] %s" % file_inner_path) # Rebuild sql cache - @util.Noparallel() - @thread_pool_fs_batch.wrap - def rebuildDb(self, delete_db=True, reason="Unknown"): - self.log.info("Rebuilding db (reason: %s)..." % reason) + def rebuildDb(self, delete_db=True): self.has_db = self.isFile("dbschema.json") if not self.has_db: return False - + self.event_db_busy = gevent.event.AsyncResult() schema = self.loadJson("dbschema.json") db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path) and delete_db: if self.db: - self.closeDb("rebuilding") # Close db if open + self.db.close() # Close db if open time.sleep(0.5) self.log.info("Deleting %s" % db_path) try: os.unlink(db_path) - except Exception as err: + except Exception, err: self.log.error("Delete error: %s" % err) - - if not self.db: - self.db = self.openDb() - self.event_db_busy = gevent.event.AsyncResult() - - self.log.info("Rebuild: Creating tables...") - - # raise DbTableError if not valid + self.db = None + self.openDb(check=False) + self.log.info("Creating tables...") self.db.checkTables() - + self.log.info("Importing data...") cur = self.db.getCursor() + cur.execute("BEGIN") cur.logging = False + found = 0 s = time.time() - self.log.info("Rebuild: Getting db files...") - db_files = list(self.getDbFiles()) - num_imported = 0 - num_total = len(db_files) - num_error = 0 - - self.log.info("Rebuild: Importing data...") try: - if num_total > 100: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - "0000", num_total, num_error - ), "rebuild", 0 - ) - for file_inner_path, file_path in db_files: + for file_inner_path, file in self.getDbFiles(): try: - if self.updateDbFile(file_inner_path, file=open(file_path, "rb"), cur=cur): - num_imported += 1 - except Exception as err: + if self.updateDbFile(file_inner_path, file=file, cur=cur): + found += 1 + except Exception, err: self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err))) - num_error += 1 - - if num_imported and num_imported % 100 == 0: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - num_imported, num_total, num_error - ), - "rebuild", int(float(num_imported) / num_total * 100) - ) - time.sleep(0.001) # Context switch to avoid UI block finally: - cur.close() - if num_total > 100: - self.site.messageWebsocket( - _["Database rebuilding...
    Imported {0} of {1} files (error: {2})..."].format( - num_imported, num_total, num_error - ), "rebuild", 100 - ) - self.log.info("Rebuild: Imported %s data file in %.3fs" % (num_imported, time.time() - s)) + cur.execute("END") + self.log.info("Imported %s data file in %ss" % (found, time.time() - s)) self.event_db_busy.set(True) # Event done, notify waiters self.event_db_busy = None # Clear event - self.db.commit("Rebuilt") - - return True # Execute sql query or rebuild on dberror def query(self, query, params=None): - if not query.strip().upper().startswith("SELECT"): - raise Exception("Only SELECT query supported") - + if self.event_db_busy: # Db not ready for queries + self.log.debug("Waiting for db...") + self.event_db_busy.get() # Wait for event try: res = self.getDb().execute(query, params) - except sqlite3.DatabaseError as err: + except sqlite3.DatabaseError, err: if err.__class__.__name__ == "DatabaseError": self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query)) - try: - self.rebuildDb(reason="Query error") - except sqlite3.OperationalError: - pass + self.rebuildDb() res = self.db.cur.execute(query, params) else: raise err return res - def ensureDir(self, inner_path): - try: - os.makedirs(self.getPath(inner_path)) - except OSError as err: - if err.errno == errno.EEXIST: - return False - else: - raise err - return True + # Open file object + def open(self, inner_path, mode="rb"): + return open(self.getPath(inner_path), mode) # Open file object - def open(self, inner_path, mode="rb", create_dirs=False, **kwargs): - file_path = self.getPath(inner_path) - if create_dirs: - file_inner_dir = os.path.dirname(inner_path) - self.ensureDir(file_inner_dir) - return open(file_path, mode, **kwargs) + def read(self, inner_path, mode="r"): + return open(self.getPath(inner_path), mode).read() - # Open file object - @thread_pool_fs_read.wrap - def read(self, inner_path, mode="rb"): - return self.open(inner_path, mode).read() - - @thread_pool_fs_write.wrap - def writeThread(self, inner_path, content): + # Write content to file + def write(self, inner_path, content): file_path = self.getPath(inner_path) # Create dir if not exist - self.ensureDir(os.path.dirname(inner_path)) + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) # Write file if hasattr(content, 'read'): # File-like object - with open(file_path, "wb") as file: shutil.copyfileobj(content, file) # Write buff to disk else: # Simple string @@ -278,10 +175,7 @@ class SiteStorage(object): else: with open(file_path, "wb") as file: file.write(content) - - # Write content to file - def write(self, inner_path, content): - self.writeThread(inner_path, content) + del content self.onUpdated(inner_path) # Remove file from filesystem @@ -296,53 +190,30 @@ class SiteStorage(object): def rename(self, inner_path_before, inner_path_after): for retry in range(3): - rename_err = None # To work around "The process cannot access the file because it is being used by another process."
error try: os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after)) + err = None break - except Exception as err: - rename_err = err + except Exception, err: self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry)) time.sleep(0.1 + retry) - if rename_err: - raise rename_err + if err: + raise err # List files from a directory - @thread_pool_fs_read.wrap - def walk(self, dir_inner_path, ignore=None): + def walk(self, dir_inner_path): directory = self.getPath(dir_inner_path) for root, dirs, files in os.walk(directory): root = root.replace("\\", "/") root_relative_path = re.sub("^%s" % re.escape(directory), "", root).lstrip("/") for file_name in files: if root_relative_path: # Not root dir - file_relative_path = root_relative_path + "/" + file_name + yield root_relative_path + "/" + file_name else: - file_relative_path = file_name - - if ignore and SafeRe.match(ignore, file_relative_path): - continue - - yield file_relative_path - - # Don't scan directory that is in the ignore pattern - if ignore: - dirs_filtered = [] - for dir_name in dirs: - if root_relative_path: - dir_relative_path = root_relative_path + "/" + dir_name - else: - dir_relative_path = dir_name - - if ignore == ".*" or re.match(".*([|(]|^)%s([|)]|$)" % re.escape(dir_relative_path + "/.*"), ignore): - continue - - dirs_filtered.append(dir_name) - dirs[:] = dirs_filtered + yield file_name # list directories in a directory - @thread_pool_fs_read.wrap def list(self, dir_inner_path): directory = self.getPath(dir_inner_path) return os.listdir(directory) @@ -350,43 +221,51 @@ class SiteStorage(object): # Site content updated def onUpdated(self, inner_path, file=None): # Update Sql cache - should_load_to_db = inner_path.endswith(".json") or inner_path.endswith(".json.gz") if inner_path == "dbschema.json": self.has_db = self.isFile("dbschema.json") # Reopen DB to check changes if self.has_db: - self.closeDb("New dbschema") - gevent.spawn(self.getDb) - elif not config.disable_db and should_load_to_db and self.has_db: # Load json file to db + self.closeDb() + self.openDb() + elif not config.disable_db and inner_path.endswith(".json") and self.has_db: # Load json file to db if config.verbose: self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file)) try: self.updateDbFile(inner_path, file) - except Exception as err: + except Exception, err: self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err))) - self.closeDb("Json load error") + self.closeDb() # Load and parse json file - @thread_pool_fs_read.wrap def loadJson(self, inner_path): - try: - with self.open(inner_path, "r", encoding="utf8") as file: - return json.load(file) - except Exception as err: - self.log.warning("Json load error: %s" % Debug.formatException(err)) - return None + with self.open(inner_path) as file: + return json.load(file) # Write formatted json file def writeJson(self, inner_path, data): + content = json.dumps(data, indent=1, sort_keys=True) + + # Make it a little more compact by removing unnecessary white space + def compact_dict(match): + if "\n" in match.group(0): + return match.group(0).replace(match.group(1), match.group(1).strip()) + else: + return match.group(0) + + content = re.sub("\{(\n[^,\[\{]{10,100}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL) + + # Remove end of line whitespace + content = re.sub("(?m)[ ]+$", "", content) + # Write to disk - self.write(inner_path, helper.jsonDumps(data).encode("utf8")) + self.write(inner_path, content) # Get file size 
def getSize(self, inner_path): path = self.getPath(inner_path) try: return os.path.getsize(path) - except Exception: + except: return 0 # File exist @@ -407,110 +286,82 @@ class SiteStorage(object): if not inner_path: return self.directory - if "../" in inner_path: - raise Exception("File not allowed: %s" % inner_path) + if ".." in inner_path: + raise Exception(u"File not allowed: %s" % inner_path) - return "%s/%s" % (self.directory, inner_path) + return u"%s/%s" % (self.directory, inner_path) # Get site dir relative path def getInnerPath(self, path): if path == self.directory: inner_path = "" else: - if path.startswith(self.directory): - inner_path = path[len(self.directory) + 1:] - else: - raise Exception("File not allowed: %s" % path) + inner_path = re.sub("^%s/" % re.escape(self.directory), "", path) return inner_path # Verify all files sha512sum using content.json def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True): bad_files = [] - back = defaultdict(int) - back["bad_files"] = bad_files i = 0 - self.log.debug("Verifing files...") if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first self.log.debug("VerifyFile content.json not exists") self.site.needFile("content.json", update=True) # Force update to fix corrupt file self.site.content_manager.loadContent() # Reload content.json - for content_inner_path, content in list(self.site.content_manager.contents.items()): - back["num_content"] += 1 + for content_inner_path, content in self.site.content_manager.contents.items(): i += 1 if i % 50 == 0: - time.sleep(0.001) # Context switch to avoid gevent hangs + time.sleep(0.0001) # Context switch to avoid gevent hangs if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file - back["num_content_missing"] += 1 self.log.debug("[MISSING] %s" % content_inner_path) bad_files.append(content_inner_path) - for file_relative_path in list(content.get("files", {}).keys()): - back["num_file"] += 1 + for file_relative_path in content.get("files", {}).keys(): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / file_path = self.getPath(file_inner_path) if not os.path.isfile(file_path): - back["num_file_missing"] += 1 self.log.debug("[MISSING] %s" % file_inner_path) bad_files.append(file_inner_path) continue if quick_check: ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"] - if not ok: - err = "Invalid size" else: - try: - ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception as _err: - err = _err - ok = False + ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) if not ok: - back["num_file_invalid"] += 1 - self.log.debug("[INVALID] %s: %s" % (file_inner_path, err)) + self.log.debug("[CHANGED] %s" % file_inner_path) if add_changed or content.get("cert_user_id"): # If updating own site only add changed user files bad_files.append(file_inner_path) # Optional files optional_added = 0 optional_removed = 0 - for file_relative_path in list(content.get("files_optional", {}).keys()): - back["num_optional"] += 1 + for file_relative_path in content.get("files_optional", {}).keys(): file_node = content["files_optional"][file_relative_path] file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / file_path 
= self.getPath(file_inner_path) - hash_id = self.site.content_manager.hashfield.getHashId(file_node["sha512"]) if not os.path.isfile(file_path): - if self.site.content_manager.isDownloaded(file_inner_path, hash_id): - back["num_optional_removed"] += 1 - self.log.debug("[OPTIONAL MISSING] %s" % file_inner_path) - self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"]) - if add_optional and self.site.isDownloadable(file_inner_path): - self.log.debug("[OPTIONAL ADDING] %s" % file_inner_path) + if self.site.content_manager.hashfield.hasHash(file_node["sha512"]): + self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"]) + if add_optional: bad_files.append(file_inner_path) continue if quick_check: ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"] else: - try: - ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) - except Exception as err: - ok = False + ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) if ok: - if not self.site.content_manager.isDownloaded(file_inner_path, hash_id): - back["num_optional_added"] += 1 - self.site.content_manager.optionalDownloaded(file_inner_path, hash_id, file_node["size"]) + if not self.site.content_manager.hashfield.hasHash(file_node["sha512"]): + self.site.content_manager.optionalDownloaded(file_inner_path, file_node["sha512"], file_node["size"]) optional_added += 1 - self.log.debug("[OPTIONAL FOUND] %s" % file_inner_path) else: - if self.site.content_manager.isDownloaded(file_inner_path, hash_id): - back["num_optional_removed"] += 1 - self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"]) + if self.site.content_manager.hashfield.hasHash(file_node["sha512"]): + self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"]) optional_removed += 1 bad_files.append(file_inner_path) self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path) @@ -521,19 +372,17 @@ class SiteStorage(object): (content_inner_path, len(content["files"]), quick_check, optional_added, optional_removed) ) - self.site.content_manager.contents.db.processDelayed() - time.sleep(0.001) # Context switch to avoid gevent hangs - return back + time.sleep(0.0001) # Context switch to avoid gevent hangs + return bad_files # Check and try to fix site files integrity def updateBadFiles(self, quick_check=True): s = time.time() - res = self.verifyFiles( + bad_files = self.verifyFiles( quick_check, - add_optional=True, + add_optional=self.site.isDownloadable(""), add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned ) - bad_files = res["bad_files"] self.site.bad_files = {} if bad_files: for bad_file in bad_files: @@ -541,96 +390,58 @@ class SiteStorage(object): self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check)) # Delete site's all file - @thread_pool_fs_batch.wrap def deleteFiles(self): - site_title = self.site.content_manager.contents.get("content.json", {}).get("title", self.site.address) - message_id = "delete-%s" % self.site.address - self.log.debug("Deleting files from content.json (title: %s)..." 
% site_title) - + self.log.debug("Deleting files from content.json...") files = [] # Get filenames - content_inner_paths = list(self.site.content_manager.contents.keys()) - for i, content_inner_path in enumerate(content_inner_paths): - content = self.site.content_manager.contents.get(content_inner_path, {}) + for content_inner_path in self.site.content_manager.contents.keys(): + content = self.site.content_manager.contents[content_inner_path] files.append(content_inner_path) # Add normal files - for file_relative_path in list(content.get("files", {}).keys()): + for file_relative_path in content.get("files", {}).keys(): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir files.append(file_inner_path) # Add optional files - for file_relative_path in list(content.get("files_optional", {}).keys()): + for file_relative_path in content.get("files_optional", {}).keys(): file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir files.append(file_inner_path) - if i % 100 == 0: - num_files = len(files) - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Collected {num_files} files"), - message_id, (i / len(content_inner_paths)) * 25 - ) - if self.isFile("dbschema.json"): self.log.debug("Deleting db file...") - self.closeDb("Deleting site") + self.closeDb() self.has_db = False try: schema = self.loadJson("dbschema.json") db_path = self.getPath(schema["db_file"]) if os.path.isfile(db_path): os.unlink(db_path) - except Exception as err: + except Exception, err: self.log.error("Db file delete error: %s" % err) - num_files = len(files) - for i, inner_path in enumerate(files): + for inner_path in files: path = self.getPath(inner_path) if os.path.isfile(path): for retry in range(5): try: os.unlink(path) break - except Exception as err: - self.log.error("Error removing %s: %s, try #%s" % (inner_path, err, retry)) + except Exception, err: + self.log.error("Error removing %s: %s, try #%s" % (path, err, retry)) time.sleep(float(retry) / 10) - if i % 100 == 0: - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Deleting file {i}/{num_files}"), - message_id, 25 + (i / num_files) * 50 - ) self.onUpdated(inner_path, False) self.log.debug("Deleting empty dirs...") - i = 0 for root, dirs, files in os.walk(self.directory, topdown=False): for dir in dirs: path = os.path.join(root, dir) - if os.path.isdir(path): - try: - i += 1 - if i % 100 == 0: - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Deleting empty directories {i}"), - message_id, 85 - ) - os.rmdir(path) - except OSError: # Not empty - pass - + if os.path.isdir(path) and os.listdir(path) == []: + os.removedirs(path) + self.log.debug("Removing %s" % path) if os.path.isdir(self.directory) and os.listdir(self.directory) == []: - os.rmdir(self.directory) # Remove sites directory if empty + os.removedirs(self.directory) # Remove sites directory if empty if os.path.isdir(self.directory): self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory) - self.site.messageWebsocket( - _("Deleting site {site_title}...
    Site deleted, but some unknown files left in the directory"), - message_id, 100 - ) return False # Some files not deleted else: - self.log.debug("Site %s data directory deleted: %s..." % (site_title, self.directory)) - - self.site.messageWebsocket( - _("Deleting site {site_title}...
    All files deleted successfully"), - message_id, 100 - ) - + self.log.debug("Site data directory deleted: %s..." % self.directory) return True # All clean diff --git a/src/Site/__init__.py b/src/Site/__init__.py index e69de29b..cc830ae8 100644 --- a/src/Site/__init__.py +++ b/src/Site/__init__.py @@ -0,0 +1,2 @@ +from Site import Site +from SiteStorage import SiteStorage \ No newline at end of file diff --git a/src/Test/BenchmarkSsl.py b/src/Test/BenchmarkSsl.py index 06181b89..06b18537 100644 --- a/src/Test/BenchmarkSsl.py +++ b/src/Test/BenchmarkSsl.py @@ -8,7 +8,7 @@ import socket import ssl sys.path.append(os.path.abspath("..")) # Imports relative to src dir -import io as StringIO +import cStringIO as StringIO import gevent from gevent.server import StreamServer @@ -46,8 +46,8 @@ def handle(sock_raw, addr): ) else: sock.sendall(data) - except Exception as err: - print(err) + except Exception, err: + print err try: sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -102,7 +102,7 @@ def getData(): total_num += 1 total_bytes += buff.tell() if not data: - print("No data") + print "No data" sock.shutdown(gevent.socket.SHUT_WR) sock.close() @@ -119,8 +119,8 @@ def info(): else: memory_info = process.get_memory_info while 1: - print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, end=' ') - print("using", clipher, "Mem:", memory_info()[0] / float(2 ** 20)) + print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s, + print "using", clipher, "Mem:", memory_info()[0] / float(2 ** 20) time.sleep(1) gevent.spawn(info) @@ -132,7 +132,7 @@ for test in range(1): gevent.joinall(clients) -print(total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s) +print total_num, "req", (total_bytes / 1024), "kbytes", "transfered in", time.time() - s # Separate client/server process: # 10*10*100: diff --git a/src/Test/Spy.py b/src/Test/Spy.py index 44422550..c017dea9 100644 --- a/src/Test/Spy.py +++ b/src/Test/Spy.py @@ -1,23 +1,17 @@ -import logging - class Spy: - def __init__(self, obj, func_name): - self.obj = obj - self.__name__ = func_name - self.func_original = getattr(self.obj, func_name) - self.calls = [] + def __init__(self, obj, func_name): + self.obj = obj + self.func_name = func_name + self.func_original = getattr(self.obj, func_name) + self.calls = [] - def __enter__(self, *args, **kwargs): - logging.debug("Spy started") - def loggedFunc(cls, *args, **kwargs): - call = dict(enumerate(args, 1)) - call[0] = cls - call.update(kwargs) - logging.debug("Spy call: %s" % call) - self.calls.append(call) - return self.func_original(cls, *args, **kwargs) - setattr(self.obj, self.__name__, loggedFunc) - return self.calls + def __enter__(self, *args, **kwargs): + def loggedFunc(cls, *args, **kwags): + print "Logging", self, args, kwargs + self.calls.append(args) + return self.func_original(cls, *args, **kwargs) + setattr(self.obj, self.func_name, loggedFunc) + return self.calls - def __exit__(self, *args, **kwargs): - setattr(self.obj, self.__name__, self.func_original) \ No newline at end of file + def __exit__(self, *args, **kwargs): + setattr(self.obj, self.func_name, self.func_original) \ No newline at end of file diff --git a/src/Test/TestCached.py b/src/Test/TestCached.py deleted file mode 100644 index 088962c0..00000000 --- a/src/Test/TestCached.py +++ /dev/null @@ -1,59 +0,0 @@ -import time - -from util import Cached - - -class CachedObject: - def __init__(self): - self.num_called_add = 0 - 
self.num_called_multiply = 0 - self.num_called_none = 0 - - @Cached(timeout=1) - def calcAdd(self, a, b): - self.num_called_add += 1 - return a + b - - @Cached(timeout=1) - def calcMultiply(self, a, b): - self.num_called_multiply += 1 - return a * b - - @Cached(timeout=1) - def none(self): - self.num_called_none += 1 - return None - - -class TestCached: - def testNoneValue(self): - cached_object = CachedObject() - assert cached_object.none() is None - assert cached_object.none() is None - assert cached_object.num_called_none == 1 - time.sleep(2) - assert cached_object.none() is None - assert cached_object.num_called_none == 2 - - def testCall(self): - cached_object = CachedObject() - - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcMultiply(1, 2) == 2 - assert cached_object.calcMultiply(1, 2) == 2 - assert cached_object.num_called_add == 1 - assert cached_object.num_called_multiply == 1 - - assert cached_object.calcAdd(2, 3) == 5 - assert cached_object.calcAdd(2, 3) == 5 - assert cached_object.num_called_add == 2 - - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.calcMultiply(2, 3) == 6 - assert cached_object.num_called_add == 2 - assert cached_object.num_called_multiply == 2 - - time.sleep(2) - assert cached_object.calcAdd(1, 2) == 3 - assert cached_object.num_called_add == 3 diff --git a/src/Test/TestConnectionServer.py b/src/Test/TestConnectionServer.py index 82ee605c..7492925f 100644 --- a/src/Test/TestConnectionServer.py +++ b/src/Test/TestConnectionServer.py @@ -1,66 +1,43 @@ import time -import socket import gevent import pytest -import mock from Crypt import CryptConnection from Connection import ConnectionServer -from Config import config @pytest.mark.usefixtures("resetSettings") class TestConnection: - def testIpv6(self, file_server6): - assert ":" in file_server6.ip + def testSslConnection(self, file_server): + file_server.ip_incoming = {} # Reset flood protection + client = ConnectionServer("127.0.0.1", 1545) + assert file_server != client - client = ConnectionServer(file_server6.ip, 1545) - connection = client.getConnection(file_server6.ip, 1544) - - assert connection.ping() + # Connect to myself + connection = client.getConnection("127.0.0.1", 1544) + assert len(file_server.connections) == 1 + assert len(file_server.ips) == 1 + assert connection.handshake + assert connection.crypt # Close connection connection.close() client.stop() time.sleep(0.01) - assert len(file_server6.connections) == 0 - - # Should not able to reach on ipv4 ip - with pytest.raises(socket.error) as err: - client = ConnectionServer("127.0.0.1", 1545) - connection = client.getConnection("127.0.0.1", 1544) - - def testSslConnection(self, file_server): - client = ConnectionServer(file_server.ip, 1545) - assert file_server != client - - # Connect to myself - with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips - connection = client.getConnection(file_server.ip, 1544) - - assert len(file_server.connections) == 1 - assert connection.handshake - assert connection.crypt - - - # Close connection - connection.close("Test ended") - client.stop() - time.sleep(0.1) assert len(file_server.connections) == 0 - assert file_server.num_incoming == 2 # One for file_server fixture, one for the test + assert len(file_server.ips) == 0 def testRawConnection(self, file_server): - client = ConnectionServer(file_server.ip, 1545) + file_server.ip_incoming = {} # Reset flood protection + client = 
ConnectionServer("127.0.0.1", 1545) assert file_server != client # Remove all supported crypto crypt_supported_bk = CryptConnection.manager.crypt_supported CryptConnection.manager.crypt_supported = [] - with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) assert len(file_server.connections) == 1 assert not connection.crypt @@ -74,8 +51,9 @@ class TestConnection: CryptConnection.manager.crypt_supported = crypt_supported_bk def testPing(self, file_server, site): - client = ConnectionServer(file_server.ip, 1545) - connection = client.getConnection(file_server.ip, 1544) + file_server.ip_incoming = {} # Reset flood protection + client = ConnectionServer("127.0.0.1", 1545) + connection = client.getConnection("127.0.0.1", 1544) assert connection.ping() @@ -83,36 +61,38 @@ class TestConnection: client.stop() def testGetConnection(self, file_server): - client = ConnectionServer(file_server.ip, 1545) - connection = client.getConnection(file_server.ip, 1544) + file_server.ip_incoming = {} # Reset flood protection + client = ConnectionServer("127.0.0.1", 1545) + connection = client.getConnection("127.0.0.1", 1544) # Get connection by ip/port - connection2 = client.getConnection(file_server.ip, 1544) + connection2 = client.getConnection("127.0.0.1", 1544) assert connection == connection2 # Get connection by peerid - assert not client.getConnection(file_server.ip, 1544, peer_id="notexists", create=False) - connection2 = client.getConnection(file_server.ip, 1544, peer_id=connection.handshake["peer_id"], create=False) + assert not client.getConnection("127.0.0.1", 1544, peer_id="notexists", create=False) + connection2 = client.getConnection("127.0.0.1", 1544, peer_id=connection.handshake["peer_id"], create=False) assert connection2 == connection connection.close() client.stop() def testFloodProtection(self, file_server): + file_server.ip_incoming = {} # Reset flood protection whitelist = file_server.whitelist # Save for reset file_server.whitelist = [] # Disable 127.0.0.1 whitelist - client = ConnectionServer(file_server.ip, 1545) + client = ConnectionServer("127.0.0.1", 1545) # Only allow 6 connection in 1 minute for reconnect in range(6): - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) assert connection.handshake connection.close() # The 7. 
one will time out with pytest.raises(gevent.Timeout): with gevent.Timeout(0.1): - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) # Reset whitelist file_server.whitelist = whitelist diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py index 7e7ca1a5..78065c00 100644 --- a/src/Test/TestContent.py +++ b/src/Test/TestContent.py @@ -1,18 +1,14 @@ import json import time -import io +from cStringIO import StringIO import pytest from Crypt import CryptBitcoin -from Content.ContentManager import VerifyError, SignError -from util.SafeRe import UnsafePatternError @pytest.mark.usefixtures("resetSettings") class TestContent: - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" - def testInclude(self, site): # Rules defined in parent content.json rules = site.content_manager.getRules("data/test_include/content.json") @@ -38,9 +34,9 @@ class TestContent: # Valid signers for root content.json assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] - def testIncludeLimits(self, site, crypt_bitcoin_lib): + def testIncludeLimits(self, site): + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # Data validation - res = [] data_dict = { "files": { "data.json": { @@ -52,52 +48,42 @@ class TestContent: } # Normal data - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data_json = json.dumps(data_dict).encode() - data = io.BytesIO(data_json) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - # Reset del data_dict["signs"] # Too large data_dict["files"]["data.json"]["size"] = 200000 # Emulate 200KB sized data.json - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Include too large" in str(err.value) - + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) # Reset data_dict["files"]["data.json"]["size"] = 505 del data_dict["signs"] # Not allowed file data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"] - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "File not allowed" in str(err.value) - + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) # Reset del data_dict["files"]["notallowed.exe"] del data_dict["signs"] # Should work again - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": 
CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey)} - data = io.BytesIO(json.dumps(data_dict).encode()) + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"]) def testSign(self, site, inner_path): # Bad privatekey - with pytest.raises(SignError) as err: - site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) - assert "Private key invalid" in str(err.value) + assert not site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) # Good privatekey - content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False) + content = site.content_manager.sign(inner_path, privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False) content_old = site.content_manager.contents[inner_path] # Content before the sign assert not content_old == content # Timestamp changed assert site.address in content["signs"] # Used the site's private key to sign @@ -122,10 +108,10 @@ class TestContent: assert len(site.content_manager.hashfield) == 0 site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))" - content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True) + content_optional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False, remove_missing_optional=True) del site.content_manager.contents["content.json"]["optional"] - content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True) + content_nooptional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False, remove_missing_optional=True) assert len(content_nooptional.get("files_optional", {})) == 0 # No optional files if no pattern assert len(content_optional["files_optional"]) > 0 @@ -134,9 +120,6 @@ class TestContent: def testFileInfo(self, site): assert "sha512" in site.content_manager.getFileInfo("index.html") - assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"] == "content.json" - assert site.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json" - assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json" assert not site.content_manager.getFileInfo("notexist") # Optional file @@ -152,14 +135,15 @@ class TestContent: assert "sha512" in file_info_optional assert file_info_optional["optional"] is True - def testVerify(self, site, crypt_bitcoin_lib): + def testVerify(self, site): + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" inner_path = "data/test_include/content.json" data_dict = site.storage.loadJson(inner_path) - data = io.BytesIO(json.dumps(data_dict).encode("utf8")) + data = StringIO(json.dumps(data_dict)) # Re-sign data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, 
sort_keys=True), privatekey) } assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) @@ -167,107 +151,27 @@ class TestContent: data_dict["address"] = "Othersite" del data_dict["signs"] data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong site address" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(inner_path, data, ignore_same=False) # Wrong inner_path data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" data_dict["inner_path"] = "content.json" del data_dict["signs"] data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Wrong inner_path" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(inner_path, data, ignore_same=False) # Everything right again data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" data_dict["inner_path"] = inner_path del data_dict["signs"] data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) - - def testVerifyInnerPath(self, site, crypt_bitcoin_lib): - inner_path = "content.json" - data_dict = site.storage.loadJson(inner_path) - - for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg", "árvzítűrő/tükörfúrógép.txt"]: - data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} - - if "sign" in data_dict: - del data_dict["sign"] - del data_dict["signs"] - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) - - for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg", "con.txt", "any/con.txt"]: - data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} - - if "sign" in data_dict: - del data_dict["sign"] - del data_dict["signs"] - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(inner_path, data, ignore_same=False) - assert "Invalid relative path" in str(err.value) - - @pytest.mark.parametrize("key", ["ignore", "optional"]) 
- def testSignUnsafePattern(self, site, key): - site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*" - with pytest.raises(UnsafePatternError) as err: - site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False) - assert "Potentially unsafe" in str(err.value) - - - def testVerifyUnsafePattern(self, site, crypt_bitcoin_lib): - site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*" - with pytest.raises(UnsafePatternError) as err: - with site.storage.open("data/test_include/content.json") as data: - site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err.value) - - site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0} - with pytest.raises(UnsafePatternError) as err: - with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data: - site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False) - assert "Potentially unsafe" in str(err.value) - - def testPathValidation(self, site): - assert site.content_manager.isValidRelativePath("test.txt") - assert site.content_manager.isValidRelativePath("test/!@#$%^&().txt") - assert site.content_manager.isValidRelativePath("ÜøßÂŒƂÆÇ.txt") - assert site.content_manager.isValidRelativePath("тест.текст") - assert site.content_manager.isValidRelativePath("𝐮𝐧𝐢𝐜𝐨𝐝𝐞𝑖𝑠𝒂𝒘𝒆𝒔𝒐𝒎𝒆") - - # Test rules based on https://stackoverflow.com/questions/1976007/what-characters-are-forbidden-in-windows-and-linux-directory-names - - assert not site.content_manager.isValidRelativePath("any\\hello.txt") # \ not allowed - assert not site.content_manager.isValidRelativePath("/hello.txt") # Cannot start with / - assert not site.content_manager.isValidRelativePath("\\hello.txt") # Cannot start with \ - assert not site.content_manager.isValidRelativePath("../hello.txt") # Not allowed .. 
in path - assert not site.content_manager.isValidRelativePath("\0hello.txt") # NULL character - assert not site.content_manager.isValidRelativePath("\31hello.txt") # 0-31 (ASCII control characters) - assert not site.content_manager.isValidRelativePath("any/hello.txt ") # Cannot end with space - assert not site.content_manager.isValidRelativePath("any/hello.txt.") # Cannot end with dot - assert site.content_manager.isValidRelativePath(".hello.txt") # Allow start with dot - assert not site.content_manager.isValidRelativePath("any/CON") # Protected names on Windows - assert not site.content_manager.isValidRelativePath("CON/any.txt") - assert not site.content_manager.isValidRelativePath("any/lpt1.txt") - assert site.content_manager.isValidRelativePath("any/CONAN") - assert not site.content_manager.isValidRelativePath("any/CONOUT$") - assert not site.content_manager.isValidRelativePath("a" * 256) # Max 255 characters allowed diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py index 8e91dd3e..6655f5e2 100644 --- a/src/Test/TestContentUser.py +++ b/src/Test/TestContentUser.py @@ -1,20 +1,17 @@ import json -import io +from cStringIO import StringIO import pytest from Crypt import CryptBitcoin -from Content.ContentManager import VerifyError, SignError @pytest.mark.usefixtures("resetSettings") -class TestContentUser: +class TestUserContent: def testSigners(self, site): # File info for not existing user file file_info = site.content_manager.getFileInfo("data/users/notexist/data.json") assert file_info["content_inner_path"] == "data/users/notexist/content.json" - file_info = site.content_manager.getFileInfo("data/users/notexist/a/b/data.json") - assert file_info["content_inner_path"] == "data/users/notexist/content.json" valid_signers = site.content_manager.getValidSigners("data/users/notexist/content.json") assert valid_signers == ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "notexist", "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] @@ -32,7 +29,8 @@ class TestContentUser: valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json - assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' not in valid_signers # The user itself + assert not '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' in valid_signers # The user itself + def testRules(self, site): # We are going to manipulate the test rules based on data/users/content.json @@ -65,62 +63,13 @@ class TestContentUser: rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" not in rules["signers"] - def testRulesAddress(self, site): - user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" - user_content = site.storage.loadJson(user_inner_path) - - rules = site.content_manager.getRules(user_inner_path, user_content) - assert rules["max_size"] == 10000 - assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" in rules["signers"] - - users_content = site.content_manager.contents["data/users/content.json"] - - # Ban user based on address - users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = False - rules = site.content_manager.getRules(user_inner_path, user_content) - assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" not in rules["signers"] - - # Change max allowed size - 
users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} - rules = site.content_manager.getRules(user_inner_path, user_content) - assert rules["max_size"] == 20000 - - def testVerifyAddress(self, site): - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT - user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" - data_dict = site.storage.loadJson(user_inner_path) - users_content = site.content_manager.contents["data/users/content.json"] - - data = io.BytesIO(json.dumps(data_dict).encode()) - assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - - # Test error on 15k data.json - data_dict["files"]["data.json"]["size"] = 1024 * 15 - del data_dict["signs"] # Remove signs before signing - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err.value) - - # Give more space based on address - users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} - del data_dict["signs"] # Remove signs before signing - data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) - } - data = io.BytesIO(json.dumps(data_dict).encode()) - assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - def testVerify(self, site): privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" data_dict = site.storage.loadJson(user_inner_path) users_content = site.content_manager.contents["data/users/content.json"] - data = io.BytesIO(json.dumps(data_dict).encode()) + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) # Test max size exception by setting allowed to 0 @@ -131,11 +80,8 @@ class TestContentUser: users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 0 rules = site.content_manager.getRules(user_inner_path, data_dict) assert rules["max_size"] == 0 - data = io.BytesIO(json.dumps(data_dict).encode()) - - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include too large" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000 # Reset # Test max optional size exception @@ -145,7 +91,7 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) + data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) # 100 MB gif = Not allowed @@ -154,10 +100,8 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with 
pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Include optional files too large" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024 # Reset # hello.exe = Not allowed @@ -166,22 +110,19 @@ class TestContentUser: data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Optional file not allowed" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) del data_dict["files_optional"]["hello.exe"] # Reset # Includes not allowed in user content - data_dict["includes"] = {"other.json": {}} + data_dict["includes"] = { "other.json": { } } del data_dict["signs"] # Remove signs before signing data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } - data = io.BytesIO(json.dumps(data_dict).encode()) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - assert "Includes not allowed" in str(err.value) + data = StringIO(json.dumps(data_dict)) + assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + def testCert(self, site): # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" @@ -227,19 +168,16 @@ class TestContentUser: # Test user cert assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False + StringIO(json.dumps(signed_content)), ignore_same=False ) # Test banned user cert_user_id = user_content["cert_user_id"] # My username site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Valid signs: 0/1" in str(err.value) - del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] # Reset + assert not site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) # Test invalid cert user_content["cert_sign"] = CryptBitcoin.sign( @@ -248,12 +186,10 @@ class TestContentUser: signed_content = site.content_manager.sign( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False ) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid cert" in str(err.value) + assert not site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) # Test banned user, signed by the site owner user_content["cert_sign"] = 
CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( @@ -270,7 +206,7 @@ class TestContentUser: } assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(user_content).encode()), ignore_same=False + StringIO(json.dumps(user_content)), ignore_same=False ) def testMissingCert(self, site): @@ -297,88 +233,22 @@ class TestContentUser: assert site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False + StringIO(json.dumps(signed_content)), ignore_same=False ) - # Test invalid cert_user_id - user_content["cert_user_id"] = "nodomain" - user_content["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv) - } - signed_content = site.content_manager.sign( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False - ) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid domain in cert_user_id" in str(err.value) - # Test removed cert - del user_content["cert_user_id"] + # user_content["cert_sign"] del user_content["cert_auth_type"] del user_content["signs"] # Remove signs before signing user_content["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv) } - signed_content = site.content_manager.sign( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False - ) - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Missing cert_user_id" in str(err.value) - - - def testCertSignersPattern(self, site): - user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A" - cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA" # For 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet - - user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] - rules_content = site.content_manager.contents["data/users/content.json"] - - # Override valid cert signers for the test - rules_content["user_contents"]["cert_signers_pattern"] = "14wgQ[0-9][A-Z]" - - # Sign a valid cert - user_content["cert_user_id"] = "certuser@14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" - user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( - user_content["cert_auth_type"], - "certuser" - ), cert_priv) - signed_content = site.content_manager.sign( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False - ) - - assert site.content_manager.verifyFile( + print "--- Signed content", user_content + assert not site.content_manager.verifyFile( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False + StringIO(json.dumps(user_content)), ignore_same=False ) - # Cert does not match the pattern - rules_content["user_contents"]["cert_signers_pattern"] = "14wgX[0-9][A-Z]" - - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - 
io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) - - # Removed cert_signers_pattern - del rules_content["user_contents"]["cert_signers_pattern"] - - with pytest.raises(VerifyError) as err: - site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - io.BytesIO(json.dumps(signed_content).encode()), ignore_same=False - ) - assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err.value) - - def testNewFile(self, site): privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT inner_path = "data/users/1NEWrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" @@ -388,3 +258,4 @@ class TestContentUser: assert "test" in site.storage.loadJson(inner_path) site.storage.delete(inner_path) + diff --git a/src/Test/TestCryptBitcoin.py b/src/Test/TestCryptBitcoin.py index 2bc087b5..bebf906e 100644 --- a/src/Test/TestCryptBitcoin.py +++ b/src/Test/TestCryptBitcoin.py @@ -2,37 +2,46 @@ from Crypt import CryptBitcoin class TestCryptBitcoin: - def testSign(self, crypt_bitcoin_lib): + def testSignOld(self): + privatekey = "23DKQpDz7bXM7w5KN5Wnmz7bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf" + privatekey_bad = "23DKQpDz7bXM7w5KN5Wnmz6bwRNqNHcdQjb2WwrdB1QtTf5gM3pFdf" + + # Get address by privatekey + address = CryptBitcoin.privatekeyToAddress(privatekey) + assert address == "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez" + + address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad) + assert not address_bad == "12vTsjscg4hYPewUL2onma5pgQmWPMs3ez" + + # Text signing + sign = CryptBitcoin.signOld("hello", privatekey) + assert CryptBitcoin.verify("hello", address, sign) # Original text + assert not CryptBitcoin.verify("not hello", address, sign) # Different text + + # Signed by bad privatekey + sign_bad = CryptBitcoin.signOld("hello", privatekey_bad) + assert not CryptBitcoin.verify("hello", address, sign_bad) + + def testSign(self): privatekey = "5K9S6dVpufGnroRgFrT6wsKiz2mJRYsC73eWDmajaHserAp3F1C" privatekey_bad = "5Jbm9rrusXyApAoM8YoM4Rja337zMMoBUMRJ1uijiguU2aZRnwC" # Get address by privatekey - address = crypt_bitcoin_lib.privatekeyToAddress(privatekey) + address = CryptBitcoin.privatekeyToAddress(privatekey) assert address == "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz" - address_bad = crypt_bitcoin_lib.privatekeyToAddress(privatekey_bad) + address_bad = CryptBitcoin.privatekeyToAddress(privatekey_bad) assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz" # Text signing - data_len_list = list(range(0, 300, 10)) - data_len_list += [1024, 2048, 1024 * 128, 1024 * 1024, 1024 * 2048] - for data_len in data_len_list: - data = data_len * "!" 
- sign = crypt_bitcoin_lib.sign(data, privatekey) + sign = CryptBitcoin.sign("hello", privatekey) - assert crypt_bitcoin_lib.verify(data, address, sign) - assert not crypt_bitcoin_lib.verify("invalid" + data, address, sign) + assert CryptBitcoin.verify("hello", address, sign) + assert not CryptBitcoin.verify("not hello", address, sign) # Signed by bad privatekey - sign_bad = crypt_bitcoin_lib.sign("hello", privatekey_bad) - assert not crypt_bitcoin_lib.verify("hello", address, sign_bad) - - def testVerify(self, crypt_bitcoin_lib): - sign_uncompressed = b'G6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0=' - assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "19Bir5zRm1yo4pw9uuxQL8xwf9b7jqMpR", sign_uncompressed) - - sign_compressed = b'H6YkcFTuwKMVMHI2yycGQIFGbCZVNsZEZvSlOhKpHUt/BlADY94egmDAWdlrbbFrP9wH4aKcEfbLO8sa6f63VU0=' - assert crypt_bitcoin_lib.verify("1NQUem2M4cAqWua6BVFBADtcSP55P4QobM#web/gitcenter", "1KH5BdNnqxh2KRWMMT8wUXzUgz4vVQ4S8p", sign_compressed) + sign_bad = CryptBitcoin.sign("hello", privatekey_bad) + assert not CryptBitcoin.verify("hello", address, sign_bad) def testNewPrivatekey(self): assert CryptBitcoin.newPrivatekey() != CryptBitcoin.newPrivatekey() diff --git a/src/Test/TestCryptHash.py b/src/Test/TestCryptHash.py deleted file mode 100644 index b91dbcca..00000000 --- a/src/Test/TestCryptHash.py +++ /dev/null @@ -1,31 +0,0 @@ -import base64 - -from Crypt import CryptHash - -sha512t_sum_hex = "2e9466d8aa1f340c91203b4ddbe9b6669879616a1b8e9571058a74195937598d" -sha512t_sum_bin = b".\x94f\xd8\xaa\x1f4\x0c\x91 ;M\xdb\xe9\xb6f\x98yaj\x1b\x8e\x95q\x05\x8at\x19Y7Y\x8d" -sha256_sum_hex = "340cd04be7f530e3a7c1bc7b24f225ba5762ec7063a56e1ae01a30d56722e5c3" - - -class TestCryptBitcoin: - - def testSha(self, site): - file_path = site.storage.getPath("dbschema.json") - assert CryptHash.sha512sum(file_path) == sha512t_sum_hex - assert CryptHash.sha512sum(open(file_path, "rb")) == sha512t_sum_hex - assert CryptHash.sha512sum(open(file_path, "rb"), format="digest") == sha512t_sum_bin - - assert CryptHash.sha256sum(file_path) == sha256_sum_hex - assert CryptHash.sha256sum(open(file_path, "rb")) == sha256_sum_hex - - with open(file_path, "rb") as f: - hash = CryptHash.Sha512t(f.read(100)) - hash.hexdigest() != sha512t_sum_hex - hash.update(f.read(1024 * 1024)) - assert hash.hexdigest() == sha512t_sum_hex - - def testRandom(self): - assert len(CryptHash.random(64)) == 64 - assert CryptHash.random() != CryptHash.random() - assert bytes.fromhex(CryptHash.random(encoding="hex")) - assert base64.b64decode(CryptHash.random(encoding="base64")) diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py index 67f383a3..9858563d 100644 --- a/src/Test/TestDb.py +++ b/src/Test/TestDb.py @@ -1,4 +1,8 @@ -import io +import os +import cStringIO as StringIO + +from Config import config +from Db import Db class TestDb: @@ -49,81 +53,14 @@ class TestDb: {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]} ).fetchone()["num"] == 2 - # Test multiple select using named params - assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title = :title", - {"test_id": [1, 2, 3], "title": "Test #2"} - ).fetchone()["num"] == 1 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title IN :title", - {"test_id": [1, 2, 3], "title": ["Test #2", "Test 
#3", "Test #4"]} - ).fetchone()["num"] == 2 - - # Large amount of IN values - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__test_id": list(range(2, 3000))} - ).fetchone()["num"] == 2 - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"test_id": list(range(50, 3000))} - ).fetchone()["num"] == 50 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__title": ["Test #%s" % i for i in range(50, 3000)]} - ).fetchone()["num"] == 50 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"title__like": "%20%"} - ).fetchone()["num"] == 1 - # Test named parameter escaping assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike", {"test_id": 1, "titlelike": "Test%"} ).fetchone()["num"] == 1 - def testEscaping(self, db): - # Test insert - for i in range(100): - db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test '\" #%s" % i}) - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"title": "Test '\" #1"} - ).fetchone()["num"] == 1 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"title": ["Test '\" #%s" % i for i in range(0, 50)]} - ).fetchone()["num"] == 50 - - assert db.execute( - "SELECT COUNT(*) AS num FROM test WHERE ?", - {"not__title": ["Test '\" #%s" % i for i in range(50, 3000)]} - ).fetchone()["num"] == 50 - - def testUpdateJson(self, db): - f = io.BytesIO() - f.write(""" - { - "test": [ - {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"} - ] - } - """.encode()) - f.seek(0) - assert db.updateJson(db.db_dir + "data.json", f) is True - assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1 - assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1 - - def testUnsafePattern(self, db): - db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . 
supported - f = io.StringIO() + f = StringIO.StringIO() f.write(""" { "test": [ @@ -132,6 +69,6 @@ class TestDb: } """) f.seek(0) - assert db.updateJson(db.db_dir + "data.json", f) is False - assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0 - assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0 + assert db.updateJson(db.db_dir + "data.json", f) == True + assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1 + assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1 diff --git a/src/Test/TestDbQuery.py b/src/Test/TestDbQuery.py index 597bc950..214704a4 100644 --- a/src/Test/TestDbQuery.py +++ b/src/Test/TestDbQuery.py @@ -1,6 +1,6 @@ import re -from Db.DbQuery import DbQuery +from Db import DbQuery class TestDbQuery: diff --git a/src/Test/TestDebug.py b/src/Test/TestDebug.py deleted file mode 100644 index e3eb20b3..00000000 --- a/src/Test/TestDebug.py +++ /dev/null @@ -1,52 +0,0 @@ -from Debug import Debug -import gevent -import os -import re - -import pytest - - -class TestDebug: - @pytest.mark.parametrize("items,expected", [ - (["@/src/A/B/C.py:17"], ["A/B/C.py line 17"]), # basic test - (["@/src/Db/Db.py:17"], ["Db.py line 17"]), # path compression - (["%s:1" % __file__], ["TestDebug.py line 1"]), - (["@/plugins/Chart/ChartDb.py:100"], ["ChartDb.py line 100"]), # plugins - (["@/main.py:17"], ["main.py line 17"]), # root - (["@\\src\\Db\\__init__.py:17"], ["Db/__init__.py line 17"]), # Windows paths - ([":1"], []), # importlib builtins - ([":1"], []), # importlib builtins - (["/home/ivanq/ZeroNet/src/main.py:13"], ["?/src/main.py line 13"]), # best-effort anonymization - (["C:\\ZeroNet\\core\\src\\main.py:13"], ["?/src/main.py line 13"]), - (["/root/main.py:17"], ["/root/main.py line 17"]), - (["{gevent}:13"], ["/__init__.py line 13"]), # modules - (["{os}:13"], [" line 13"]), # python builtin modules - (["src/gevent/event.py:17"], ["/event.py line 17"]), # gevent-overridden __file__ - (["@/src/Db/Db.py:17", "@/src/Db/DbQuery.py:1"], ["Db.py line 17", "DbQuery.py line 1"]), # multiple args - (["@/src/Db/Db.py:17", "@/src/Db/Db.py:1"], ["Db.py line 17", "1"]), # same file - (["{os}:1", "@/src/Db/Db.py:17"], [" line 1", "Db.py line 17"]), # builtins - (["{gevent}:1"] + ["{os}:3"] * 4 + ["@/src/Db/Db.py:17"], ["/__init__.py line 1", "...", "Db.py line 17"]) - ]) - def testFormatTraceback(self, items, expected): - q_items = [] - for item in items: - file, line = item.rsplit(":", 1) - if file.startswith("@"): - file = Debug.root_dir + file[1:] - file = file.replace("{os}", os.__file__) - file = file.replace("{gevent}", gevent.__file__) - q_items.append((file, int(line))) - assert Debug.formatTraceback(q_items) == expected - - def testFormatException(self): - try: - raise ValueError("Test exception") - except Exception: - assert re.match(r"ValueError: Test exception in TestDebug.py line [0-9]+", Debug.formatException()) - try: - os.path.abspath(1) - except Exception: - assert re.search(r"in TestDebug.py line [0-9]+ > <(posixpath|ntpath)> line ", Debug.formatException()) - - def testFormatStack(self): - assert re.match(r"TestDebug.py line [0-9]+ > <_pytest>/python.py line [0-9]+", Debug.formatStack()) diff --git a/src/Test/TestDiff.py b/src/Test/TestDiff.py index 622951a1..0e387e2a 100644 --- a/src/Test/TestDiff.py +++ b/src/Test/TestDiff.py @@ -1,4 +1,4 @@ -import io +import cStringIO as StringIO from util import Diff @@ -30,26 +30,20 @@ class TestDiff: [] ) == [("-", 11)] - 
def testUtf8(self): - assert Diff.diff( - ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three"], - ["one", "\xe5\xad\xa6\xe4\xb9\xa0\xe4\xb8\x8b", "two", "three", "four", "five"] - ) == [("=", 20), ("+", ["four", "five"])] - def testDiffLimit(self): - old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") - new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") + old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") + new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix") actions = Diff.diff(list(old_f), list(new_f), limit=1024) assert actions - old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") - new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix"*1024) + old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") + new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix"*1024) actions = Diff.diff(list(old_f), list(new_f), limit=1024) assert actions is False def testPatch(self): - old_f = io.BytesIO(b"one\ntwo\nthree\nhmm\nsix") - new_f = io.BytesIO(b"one\ntwo\nthree\nfour\nfive\nsix") + old_f = StringIO.StringIO("one\ntwo\nthree\nhmm\nsix") + new_f = StringIO.StringIO("one\ntwo\nthree\nfour\nfive\nsix") actions = Diff.diff( list(old_f), list(new_f) diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py index 3fabc271..9b42ffce 100644 --- a/src/Test/TestFileRequest.py +++ b/src/Test/TestFileRequest.py @@ -1,4 +1,4 @@ -import io +import cStringIO as StringIO import pytest import time @@ -13,17 +13,13 @@ from File import FileServer class TestFileRequest: def testGetFile(self, file_server, site): file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer(file_server.ip, 1545) + client = ConnectionServer("127.0.0.1", 1545) - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) file_server.sites[site.address] = site - # Normal request response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0}) - assert b"sign" in response["body"] - - response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")}) - assert b"sign" in response["body"] + assert "sign" in response["body"] # Invalid file response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}) @@ -35,7 +31,7 @@ class TestFileRequest: # Stream from parent dir response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0}) - assert "File read exception" in response["error"] + assert "File read error" in response["error"] # Invalid site response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0}) @@ -44,81 +40,60 @@ class TestFileRequest: response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0}) assert "Unknown site" in response["error"] - # Invalid size - response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234}) - assert "File size does not match" in response["error"] - - # Invalid path - for path in ["../users.json", "./../users.json", "data/../content.json", ".../users.json"]: - for sep in ["/", "\\"]: - response = connection.request("getFile", {"site": site.address, "inner_path": path.replace("/", sep), "location": 0}) - assert response["error"] == 'File read exception' - connection.close() client.stop() def testStreamFile(self, file_server, site): 
file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer(file_server.ip, 1545) - connection = client.getConnection(file_server.ip, 1544) + client = ConnectionServer("127.0.0.1", 1545) + connection = client.getConnection("127.0.0.1", 1544) file_server.sites[site.address] = site - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff) assert "stream_bytes" in response - assert b"sign" in buff.getvalue() + assert "sign" in buff.getvalue() # Invalid file - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff) assert "File read error" in response["error"] # Location over size - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request( "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff ) assert "File read error" in response["error"] # Stream from parent dir - buff = io.BytesIO() + buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff) - assert "File read exception" in response["error"] + assert "File read error" in response["error"] connection.close() client.stop() def testPex(self, file_server, site, site_temp): file_server.sites[site.address] = site - client = FileServer(file_server.ip, 1545) - client.sites = {site_temp.address: site_temp} + client = FileServer("127.0.0.1", 1545) + client.sites[site_temp.address] = site_temp site_temp.connection_server = client - connection = client.getConnection(file_server.ip, 1544) + connection = client.getConnection("127.0.0.1", 1544) # Add new fake peer to site - fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True) + fake_peer = site.addPeer("1.2.3.4", 11337, return_peer=True) # Add fake connection to it - fake_peer.connection = Connection(file_server, file_server.ip_external, 11337) + fake_peer.connection = Connection(file_server, "1.2.3.4", 11337) fake_peer.connection.last_recv_time = time.time() assert fake_peer in site.getConnectablePeers() # Add file_server as peer to client - peer_file_server = site_temp.addPeer(file_server.ip, 1544) + peer_file_server = site_temp.addPeer("127.0.0.1", 1544) - assert "%s:11337" % file_server.ip_external not in site_temp.peers + assert "1.2.3.4:11337" not in site_temp.peers assert peer_file_server.pex() - assert "%s:11337" % file_server.ip_external in site_temp.peers - - # Should not exchange private peers from local network - fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True) - assert fake_peer_private not in site.getConnectablePeers(allow_private=False) - fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337) - fake_peer_private.connection.last_recv_time = time.time() - - assert "192.168.0.1:11337" not in site_temp.peers - assert not peer_file_server.pex() - assert "192.168.0.1:11337" not in site_temp.peers - + assert "1.2.3.4:11337" in site_temp.peers connection.close() client.stop() diff --git a/src/Test/TestFlag.py b/src/Test/TestFlag.py deleted file mode 100644 index 12fd8165..00000000 --- a/src/Test/TestFlag.py +++ /dev/null @@ -1,39 +0,0 @@ -import os - -import pytest - -from util.Flag import Flag - -class TestFlag: - def testFlagging(self): - flag = Flag() - @flag.admin - @flag.no_multiuser - def testFn(anything): - return anything - - 
assert "admin" in flag.db["testFn"] - assert "no_multiuser" in flag.db["testFn"] - - def testSubclassedFlagging(self): - flag = Flag() - class Test: - @flag.admin - @flag.no_multiuser - def testFn(anything): - return anything - - class SubTest(Test): - pass - - assert "admin" in flag.db["testFn"] - assert "no_multiuser" in flag.db["testFn"] - - def testInvalidFlag(self): - flag = Flag() - with pytest.raises(Exception) as err: - @flag.no_multiuser - @flag.unknown_flag - def testFn(anything): - return anything - assert "Invalid flag" in str(err.value) diff --git a/src/Test/TestHelper.py b/src/Test/TestHelper.py index 07644ec0..28f7f6fb 100644 --- a/src/Test/TestHelper.py +++ b/src/Test/TestHelper.py @@ -1,10 +1,7 @@ import socket -import struct -import os import pytest from util import helper -from Config import config @pytest.mark.usefixtures("resetSettings") @@ -15,25 +12,13 @@ class TestHelper: assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"') def testPackAddress(self): - for port in [1, 1000, 65535]: - for ip in ["1.1.1.1", "127.0.0.1", "0.0.0.0", "255.255.255.255", "192.168.1.1"]: - assert len(helper.packAddress(ip, port)) == 6 - assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port) - - for ip in ["1:2:3:4:5:6:7:8", "::1", "2001:19f0:6c01:e76:5400:1ff:fed6:3eca", "2001:4860:4860::8888"]: - assert len(helper.packAddress(ip, port)) == 18 - assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port) - - assert len(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == 12 - assert helper.unpackOnionAddress(helper.packOnionAddress("boot3rdez4rzn36x.onion", port)) == ("boot3rdez4rzn36x.onion", port) - - with pytest.raises(struct.error): - helper.packAddress("1.1.1.1", 100000) + assert len(helper.packAddress("1.1.1.1", 1)) == 6 + assert helper.unpackAddress(helper.packAddress("1.1.1.1", 1)) == ("1.1.1.1", 1) with pytest.raises(socket.error): helper.packAddress("999.1.1.1", 1) - with pytest.raises(Exception): + with pytest.raises(AssertionError): helper.unpackAddress("X") def testGetDirname(self): @@ -42,7 +27,8 @@ class TestHelper: assert helper.getDirname("") == "" assert helper.getDirname("content.json") == "" assert helper.getDirname("data/users/") == "data/users/" - assert helper.getDirname("/data/users/content.json") == "data/users/" + assert helper.getDirname("/data/users/content.json") == "/data/users/" + def testGetFilename(self): assert helper.getFilename("data/users/content.json") == "content.json" @@ -50,30 +36,4 @@ class TestHelper: assert helper.getFilename("") == "" assert helper.getFilename("content.json") == "content.json" assert helper.getFilename("data/users/") == "" - assert helper.getFilename("/data/users/content.json") == "content.json" - - def testIsIp(self): - assert helper.isIp("1.2.3.4") - assert helper.isIp("255.255.255.255") - assert not helper.isIp("any.host") - assert not helper.isIp("1.2.3.4.com") - assert not helper.isIp("1.2.3.4.any.host") - - def testIsPrivateIp(self): - assert helper.isPrivateIp("192.168.1.1") - assert not helper.isPrivateIp("1.1.1.1") - assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c") - assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c") # cjdns - - def testOpenLocked(self): - locked_f = helper.openLocked(config.data_dir + "/locked.file") - assert locked_f - with pytest.raises(BlockingIOError): - locked_f_again = helper.openLocked(config.data_dir + "/locked.file") - locked_f_different = helper.openLocked(config.data_dir + "/locked_different.file") - - 
locked_f.close() - locked_f_different.close() - - os.unlink(locked_f.name) - os.unlink(locked_f_different.name) + assert helper.getFilename("/data/users/content.json") == "content.json" \ No newline at end of file diff --git a/src/Test/TestMsgpack.py b/src/Test/TestMsgpack.py deleted file mode 100644 index 5a0b6d4d..00000000 --- a/src/Test/TestMsgpack.py +++ /dev/null @@ -1,88 +0,0 @@ -import io -import os - -import msgpack -import pytest - -from Config import config -from util import Msgpack -from collections import OrderedDict - - -class TestMsgpack: - test_data = OrderedDict( - sorted({"cmd": "fileGet", "bin": b'p\x81zDhL\xf0O\xd0\xaf', "params": {"site": "1Site"}, "utf8": b'\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91'.decode("utf8"), "list": [b'p\x81zDhL\xf0O\xd0\xaf', b'p\x81zDhL\xf0O\xd0\xaf']}.items()) - ) - - def testPacking(self): - assert Msgpack.pack(self.test_data) == b'\x85\xa3bin\xc4\np\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xc4\np\x81zDhL\xf0O\xd0\xaf\xc4\np\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91' - assert Msgpack.pack(self.test_data, use_bin_type=False) == b'\x85\xa3bin\xaap\x81zDhL\xf0O\xd0\xaf\xa3cmd\xa7fileGet\xa4list\x92\xaap\x81zDhL\xf0O\xd0\xaf\xaap\x81zDhL\xf0O\xd0\xaf\xa6params\x81\xa4site\xa51Site\xa4utf8\xad\xc3\xa1rv\xc3\xadzt\xc5\xb1r\xc5\x91' - - def testUnpacking(self): - assert Msgpack.unpack(Msgpack.pack(self.test_data)) == self.test_data - - @pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker]) - def testUnpacker(self, unpacker_class): - unpacker = unpacker_class(raw=False) - - data = msgpack.packb(self.test_data, use_bin_type=True) - data += msgpack.packb(self.test_data, use_bin_type=True) - - messages = [] - for char in data: - unpacker.feed(bytes([char])) - for message in unpacker: - messages.append(message) - - assert len(messages) == 2 - assert messages[0] == self.test_data - assert messages[0] == messages[1] - - def testStreaming(self): - bin_data = os.urandom(20) - f = Msgpack.FilePart("%s/users.json" % config.data_dir, "rb") - f.read_bytes = 30 - - data = {"cmd": "response", "body": f, "bin": bin_data} - - out_buff = io.BytesIO() - Msgpack.stream(data, out_buff.write) - out_buff.seek(0) - - data_packb = { - "cmd": "response", - "body": open("%s/users.json" % config.data_dir, "rb").read(30), - "bin": bin_data - } - - out_buff.seek(0) - data_unpacked = Msgpack.unpack(out_buff.read()) - assert data_unpacked == data_packb - assert data_unpacked["cmd"] == "response" - assert type(data_unpacked["body"]) == bytes - - def testBackwardCompatibility(self): - packed = {} - packed["py3"] = Msgpack.pack(self.test_data, use_bin_type=False) - packed["py3_bin"] = Msgpack.pack(self.test_data, use_bin_type=True) - for key, val in packed.items(): - unpacked = Msgpack.unpack(val) - type(unpacked["utf8"]) == str - type(unpacked["bin"]) == bytes - - # Packed with use_bin_type=False (pre-ZeroNet 0.7.0) - unpacked = Msgpack.unpack(packed["py3"], decode=True) - type(unpacked["utf8"]) == str - type(unpacked["bin"]) == bytes - assert len(unpacked["utf8"]) == 9 - assert len(unpacked["bin"]) == 10 - with pytest.raises(UnicodeDecodeError) as err: # Try to decode binary as utf-8 - unpacked = Msgpack.unpack(packed["py3"], decode=False) - - # Packed with use_bin_type=True - unpacked = Msgpack.unpack(packed["py3_bin"], decode=False) - type(unpacked["utf8"]) == str - type(unpacked["bin"]) == bytes - assert len(unpacked["utf8"]) == 9 - assert len(unpacked["bin"]) == 10 - diff --git 
a/src/Test/TestNoparallel.py b/src/Test/TestNoparallel.py index 6fc4f57d..27528dae 100644 --- a/src/Test/TestNoparallel.py +++ b/src/Test/TestNoparallel.py @@ -1,20 +1,7 @@ import time -import gevent -import pytest - import util -from util import ThreadPool - - -@pytest.fixture(params=['gevent.spawn', 'thread_pool.spawn']) -def queue_spawn(request): - thread_pool = ThreadPool.ThreadPool(10) - if request.param == "gevent.spawn": - return gevent.spawn - else: - return thread_pool.spawn - +import gevent class ExampleClass(object): def __init__(self): @@ -22,41 +9,41 @@ class ExampleClass(object): @util.Noparallel() def countBlocking(self, num=5): - for i in range(1, num + 1): - time.sleep(0.1) + for i in range(1, num+1): + time.sleep(0.01) self.counted += 1 return "counted:%s" % i @util.Noparallel(queue=True, ignore_class=True) def countQueue(self, num=5): - for i in range(1, num + 1): - time.sleep(0.1) + for i in range(1, num+1): + time.sleep(0.01) self.counted += 1 return "counted:%s" % i @util.Noparallel(blocking=False) def countNoblocking(self, num=5): - for i in range(1, num + 1): + for i in range(1, num+1): time.sleep(0.01) self.counted += 1 return "counted:%s" % i class TestNoparallel: - def testBlocking(self, queue_spawn): + def testBlocking(self): obj1 = ExampleClass() obj2 = ExampleClass() # Don't allow calling again while it's running; wait until it finishes threads = [ - queue_spawn(obj1.countBlocking), - queue_spawn(obj1.countBlocking), - queue_spawn(obj1.countBlocking), - queue_spawn(obj2.countBlocking) + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj1.countBlocking), + gevent.spawn(obj2.countBlocking) ] assert obj2.countBlocking() == "counted:5" # The call is ignored as obj2.countBlocking is already counting, but blocks until it finishes gevent.joinall(threads) - assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"] + assert [thread.value for thread in threads] == ["counted:5","counted:5","counted:5","counted:5"] # Check the return value for every call obj2.countBlocking() # Allowed to call again as obj2.countBlocking finished assert obj1.counted == 5 @@ -64,6 +51,7 @@ class TestNoparallel: def testNoblocking(self): obj1 = ExampleClass() + obj2 = ExampleClass() thread1 = obj1.countNoblocking() thread2 = obj1.countNoblocking() # Ignored @@ -77,91 +65,33 @@ class TestNoparallel: obj1.countNoblocking().join() # Allow again and wait until finishes assert obj1.counted == 10 - def testQueue(self, queue_spawn): + def testQueue(self): obj1 = ExampleClass() - queue_spawn(obj1.countQueue, num=1) - queue_spawn(obj1.countQueue, num=1) - queue_spawn(obj1.countQueue, num=1) - - time.sleep(0.3) - assert obj1.counted == 2 # No multi-queue supported - - obj2 = ExampleClass() - queue_spawn(obj2.countQueue, num=10) - queue_spawn(obj2.countQueue, num=10) - - time.sleep(1.5) # Call 1 finished, call 2 still working - assert 10 < obj2.counted < 20 - - queue_spawn(obj2.countQueue, num=10) - time.sleep(2.0) - - assert obj2.counted == 30 - - def testQueueOverload(self): - obj1 = ExampleClass() - - threads = [] - for i in range(1000): - thread = gevent.spawn(obj1.countQueue, num=5) - threads.append(thread) - + threads = [ + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue) + ] gevent.joinall(threads) - assert obj1.counted == 5 * 2 # Only called twice (no multi-queue allowed) - def testIgnoreClass(self, queue_spawn): + assert obj1.counted == 15 # Calls should be executed 
sequentially + + def testIgnoreClass(self): obj1 = ExampleClass() obj2 = ExampleClass() threads = [ - queue_spawn(obj1.countQueue), - queue_spawn(obj1.countQueue), - queue_spawn(obj1.countQueue), - queue_spawn(obj2.countQueue), - queue_spawn(obj2.countQueue) + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue), + gevent.spawn(obj2.countQueue), + gevent.spawn(obj2.countQueue) ] s = time.time() - time.sleep(0.001) gevent.joinall(threads) - - # Queue limited to 2 calls (every call counts to 5 and takes 0.05 sec) - assert obj1.counted + obj2.counted == 10 + assert obj1.counted == 15 + assert obj2.counted == 10 taken = time.time() - s - assert 1.2 > taken >= 1.0 # 2 * 0.5s count = ~1s - - def testException(self, queue_spawn): - class MyException(Exception): - pass - - @util.Noparallel() - def raiseException(): - raise MyException("Test error!") - - with pytest.raises(MyException) as err: - raiseException() - assert str(err.value) == "Test error!" - - with pytest.raises(MyException) as err: - queue_spawn(raiseException).get() - assert str(err.value) == "Test error!" - - def testMultithreadMix(self, queue_spawn): - obj1 = ExampleClass() - with ThreadPool.ThreadPool(10) as thread_pool: - s = time.time() - t1 = queue_spawn(obj1.countBlocking, 5) - time.sleep(0.01) - t2 = thread_pool.spawn(obj1.countBlocking, 5) - time.sleep(0.01) - t3 = thread_pool.spawn(obj1.countBlocking, 5) - time.sleep(0.3) - t4 = gevent.spawn(obj1.countBlocking, 5) - threads = [t1, t2, t3, t4] - for thread in threads: - assert thread.get() == "counted:5" - - time_taken = time.time() - s - assert obj1.counted == 5 - assert 0.5 < time_taken < 0.7 + assert taken >= 0.25 # Every count takes 0.05sec diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py index f57e046e..f0d81033 100644 --- a/src/Test/TestPeer.py +++ b/src/Test/TestPeer.py @@ -1,26 +1,27 @@ import time -import io +from cStringIO import StringIO import pytest from File import FileServer from File import FileRequest from Crypt import CryptHash -from . 
+import Spy
 
 
 @pytest.mark.usefixtures("resetSettings")
 @pytest.mark.usefixtures("resetTempSettings")
 class TestPeer:
     def testPing(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
         file_server.sites[site.address] = site
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
+        client = FileServer("127.0.0.1", 1545)
+        client.sites[site_temp.address] = site_temp
         site_temp.connection_server = client
-        connection = client.getConnection(file_server.ip, 1544)
+        connection = client.getConnection("127.0.0.1", 1544)
 
         # Add file_server as peer to client
-        peer_file_server = site_temp.addPeer(file_server.ip, 1544)
+        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)
 
         assert peer_file_server.ping() is not None
 
@@ -32,28 +33,29 @@ class TestPeer:
         client.stop()
 
     def testDownloadFile(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
         file_server.sites[site.address] = site
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
+        client = FileServer("127.0.0.1", 1545)
+        client.sites[site_temp.address] = site_temp
         site_temp.connection_server = client
-        connection = client.getConnection(file_server.ip, 1544)
+        connection = client.getConnection("127.0.0.1", 1544)
 
         # Add file_server as peer to client
-        peer_file_server = site_temp.addPeer(file_server.ip, 1544)
+        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)
 
         # Testing streamFile
-        buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True)
-        assert b"sign" in buff.getvalue()
+        buff = peer_file_server.streamFile(site_temp.address, "content.json")
+        assert "sign" in buff.getvalue()
 
         # Testing getFile
         buff = peer_file_server.getFile(site_temp.address, "content.json")
-        assert b"sign" in buff.getvalue()
+        assert "sign" in buff.getvalue()
 
         connection.close()
         client.stop()
 
     def testHashfield(self, site):
-        sample_hash = list(site.content_manager.contents["content.json"]["files_optional"].values())[0]["sha512"]
+        sample_hash = site.content_manager.contents["content.json"]["files_optional"].values()[0]["sha512"]
 
         site.storage.verifyFiles(quick_check=True)  # Find what optional files we have
 
@@ -65,7 +67,7 @@ class TestPeer:
         assert site.content_manager.hashfield.getHashId(sample_hash) in site.content_manager.hashfield
 
         # Add new hash
-        new_hash = CryptHash.sha512sum(io.BytesIO(b"hello"))
+        new_hash = CryptHash.sha512sum(StringIO("hello"))
         assert site.content_manager.hashfield.getHashId(new_hash) not in site.content_manager.hashfield
         assert site.content_manager.hashfield.appendHash(new_hash)
         assert not site.content_manager.hashfield.appendHash(new_hash)  # Don't add second time
@@ -77,16 +79,15 @@ class TestPeer:
 
     def testHashfieldExchange(self, file_server, site, site_temp):
         server1 = file_server
+        server1.ip_incoming = {}  # Reset flood protection
         server1.sites[site.address] = site
-        site.connection_server = server1
-
-        server2 = FileServer(file_server.ip, 1545)
+        server2 = FileServer("127.0.0.1", 1545)
         server2.sites[site_temp.address] = site_temp
         site_temp.connection_server = server2
         site.storage.verifyFiles(quick_check=True)  # Find what optional files we have
 
         # Add file_server as peer to client
-        server2_peer1 = site_temp.addPeer(file_server.ip, 1544)
+        server2_peer1 = site_temp.addPeer("127.0.0.1", 1544)
 
         # Check if hashfield has any files
         assert len(site.content_manager.hashfield) > 0
@@ -98,7 +99,7 @@ class TestPeer:
 
         # Test force push new hashfield
         site_temp.content_manager.hashfield.appendHash("AABB")
-        server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True)
+        server1_peer2 = site.addPeer("127.0.0.1", 1545, return_peer=True)
         with Spy.Spy(FileRequest, "route") as requests:
             assert len(server1_peer2.hashfield) == 0
             server2_peer1.sendMyHashfield()
@@ -127,18 +128,19 @@ class TestPeer:
         server2.stop()
 
     def testFindHash(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
         file_server.sites[site.address] = site
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
+        client = FileServer("127.0.0.1", 1545)
+        client.sites[site_temp.address] = site_temp
         site_temp.connection_server = client
 
         # Add file_server as peer to client
-        peer_file_server = site_temp.addPeer(file_server.ip, 1544)
+        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)
 
         assert peer_file_server.findHashIds([1234]) == {}
 
         # Add fake peer with requred hash
-        fake_peer_1 = site.addPeer(file_server.ip_external, 1544)
+        fake_peer_1 = site.addPeer("1.2.3.4", 1544)
         fake_peer_1.hashfield.append(1234)
         fake_peer_2 = site.addPeer("1.2.3.5", 1545)
         fake_peer_2.hashfield.append(1234)
@@ -147,13 +149,14 @@ class TestPeer:
         fake_peer_3.hashfield.append(1235)
         fake_peer_3.hashfield.append(1236)
 
-        res = peer_file_server.findHashIds([1234, 1235])
-        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)])
-        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
+        assert peer_file_server.findHashIds([1234, 1235]) == {
+            1234: [('1.2.3.4', 1544), ('1.2.3.5', 1545)],
+            1235: [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
+        }
 
         # Test my address adding
         site.content_manager.hashfield.append(1234)
 
         res = peer_file_server.findHashIds([1234, 1235])
-        assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)])
-        assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)])
+        assert res[1234] == [('1.2.3.4', 1544), ('1.2.3.5', 1545), ("127.0.0.1", 1544)]
+        assert res[1235] == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
\ No newline at end of file
diff --git a/src/Test/TestRateLimit.py b/src/Test/TestRateLimit.py
index fafa5f1a..b0a91ba0 100644
--- a/src/Test/TestRateLimit.py
+++ b/src/Test/TestRateLimit.py
@@ -37,7 +37,6 @@ class TestRateLimit:
         assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted"
         assert around(time.time() - s, 0.1)  # Delays second call within interval
         assert obj1.counted == 2
-        time.sleep(0.1)  # Wait the cooldown time
 
         # Call 3 times async
         s = time.time()
@@ -51,11 +50,6 @@ class TestRateLimit:
         assert [thread.value for thread in threads] == ["counted", "counted", "counted"]
         assert around(time.time() - s, 0.2)
 
-        # Wait 0.1s cooldown
-        assert not RateLimit.isAllowed("counting", 0.1)
-        time.sleep(0.11)
-        assert RateLimit.isAllowed("counting", 0.1)
-
         # No queue = instant again
         s = time.time()
         assert RateLimit.isAllowed("counting", 0.1)
@@ -89,12 +83,12 @@ class TestRateLimit:
         assert obj1.counted == 2
         assert obj1.last_called == "call #4"
 
-        # Just called, not allowed again
-        assert not RateLimit.isAllowed("counting async", 0.1)
+        # Allowed again instantly
+        assert RateLimit.isAllowed("counting async", 0.1)
         s = time.time()
-        t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
+        RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join()
         assert obj1.counted == 3
-        assert around(time.time() - s, 0.1)
+        assert around(time.time() - s, 0.0)
         assert not RateLimit.isAllowed("counting async", 0.1)
         time.sleep(0.11)
         assert RateLimit.isAllowed("counting async", 0.1)
diff --git a/src/Test/TestSafeRe.py b/src/Test/TestSafeRe.py
deleted file mode 100644
index 429bde50..00000000
--- a/src/Test/TestSafeRe.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from util import SafeRe
-
-import pytest
-
-
-class TestSafeRe:
-    def testSafeMatch(self):
-        assert SafeRe.match(
-            "((js|css)/(?!all.(js|css))|data/users/.*db|data/users/.*/.*|data/archived|.*.py)",
-            "js/ZeroTalk.coffee"
-        )
-        assert SafeRe.match(".+/data.json", "data/users/1J3rJ8ecnwH2EPYa6MrgZttBNc61ACFiCj/data.json")
-
-    @pytest.mark.parametrize("pattern", ["([a-zA-Z]+)*", "(a|aa)+*", "(a|a?)+", "(.*a){10}", "((?!json).)*$", r"(\w+\d+)+C"])
-    def testUnsafeMatch(self, pattern):
-        with pytest.raises(SafeRe.UnsafePatternError) as err:
-            SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
-        assert "Potentially unsafe" in str(err.value)
-
-    @pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"])
-    def testUnsafeRepetition(self, pattern):
-        with pytest.raises(SafeRe.UnsafePatternError) as err:
-            SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!")
-        assert "More than" in str(err.value)
diff --git a/src/Test/TestSite.py b/src/Test/TestSite.py
index 05bb2ed9..71d1706f 100644
--- a/src/Test/TestSite.py
+++ b/src/Test/TestSite.py
@@ -4,17 +4,16 @@ import os
 import pytest
 from Site import SiteManager
 
-TEST_DATA_PATH = "src/Test/testdata"
 
 @pytest.mark.usefixtures("resetSettings")
 class TestSite:
     def testClone(self, site):
-        assert site.storage.directory == TEST_DATA_PATH + "/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
+        assert site.storage.directory == "src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"
 
         # Remove old files
-        if os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"):
-            shutil.rmtree(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
-        assert not os.path.isfile(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json")
+        if os.path.isdir("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL"):
+            shutil.rmtree("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
+        assert not os.path.isfile("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL/content.json")
 
         # Clone 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT to 15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc
         new_site = site.clone(
@@ -27,18 +26,14 @@ class TestSite:
         assert new_site.storage.isFile("index.html")
         assert new_site.storage.isFile("data/users/content.json")
         assert new_site.storage.isFile("data/zeroblog.db")
-        assert new_site.storage.verifyFiles()["bad_files"] == []  # No bad files allowed
+        assert new_site.storage.verifyFiles() == []  # No bad files allowed
         assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "MyZeroBlog"
 
-        # Optional files should be removed
-
-        assert len(new_site.storage.loadJson("content.json").get("files_optional", {})) == 0
-
         # Test re-cloning (updating)
 
         # Changes in non-data files should be overwritten
-        new_site.storage.write("index.html", b"this will be overwritten")
-        assert new_site.storage.read("index.html") == b"this will be overwritten"
+        new_site.storage.write("index.html", "this will be overwritten")
+        assert new_site.storage.read("index.html"), "this will be overwritten"
 
         # Changes in data file should be kept after re-cloning
         changed_contentjson = new_site.storage.loadJson("content.json")
@@ -62,7 +57,7 @@ class TestSite:
 
         # Delete created files
         new_site.storage.deleteFiles()
-        assert not os.path.isdir(TEST_DATA_PATH + "/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
+        assert not os.path.isdir("src/Test/testdata/159EGD5srUsMP97UpcLy8AtKQbQLK2AbbL")
 
         # Delete from site registry
         assert new_site.address in SiteManager.site_manager.sites
diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py
index cd0a4c9f..146e835e 100644
--- a/src/Test/TestSiteDownload.py
+++ b/src/Test/TestSiteDownload.py
@@ -3,21 +3,21 @@ import time
 
 import pytest
 import mock
 import gevent
-import gevent.event
-import os
 
 from Connection import ConnectionServer
 from Config import config
 from File import FileRequest
 from File import FileServer
-from Site.Site import Site
-from . import Spy
+from Site import Site
+import Spy
 
 
 @pytest.mark.usefixtures("resetTempSettings")
 @pytest.mark.usefixtures("resetSettings")
 class TestSiteDownload:
-    def testRename(self, file_server, site, site_temp):
+    def testDownload(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
+
         assert site.storage.directory == config.data_dir + "/" + site.address
         assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
 
@@ -26,121 +26,53 @@ class TestSiteDownload:
         file_server.sites[site.address] = site
 
         # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
+        client = ConnectionServer("127.0.0.1", 1545)
         site_temp.connection_server = client
         site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
-
-        site_temp.addPeer(file_server.ip, 1544)
-
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
-        assert site_temp.storage.isFile("content.json")
-
-        # Rename non-optional file
-        os.rename(site.storage.getPath("data/img/domain.png"), site.storage.getPath("data/img/domain-new.png"))
-
-        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
-        content = site.storage.loadJson("content.json")
-        assert "data/img/domain-new.png" in content["files"]
-        assert "data/img/domain.png" not in content["files"]
-        assert not site_temp.storage.isFile("data/img/domain-new.png")
-        assert site_temp.storage.isFile("data/img/domain.png")
-        settings_before = site_temp.settings
-
+        site_temp.addPeer("127.0.0.1", 1544)
         with Spy.Spy(FileRequest, "route") as requests:
-            site.publish()
-            time.sleep(0.1)
-            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
-            assert "streamFile" not in [req[1] for req in requests]
+            def boostRequest(inner_path):
+                # I really want these file
+                if inner_path == "index.html":
+                    site_temp.needFile("data/img/multiuser.png", priority=5, blocking=False)
+                    site_temp.needFile("data/img/direct_domains.png", priority=5, blocking=False)
+            site_temp.onFileDone.append(boostRequest)
+            site_temp.download(blind_includes=True).join(timeout=5)
+            file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")]
+            # Test priority
+            assert file_requests[0:2] == ["content.json", "index.html"]  # Must-have files
+            assert file_requests[2:4] == ["css/all.css", "js/all.js"]  # Important assets
+            assert file_requests[4] == "dbschema.json"  # Database map
+            assert file_requests[5:7] == ["data/img/multiuser.png", "data/img/direct_domains.png"]  # Directly requested files
+            assert "-default" in file_requests[-1]  # Put default files for cloning to the end
 
-        content = site_temp.storage.loadJson("content.json")
-        assert "data/img/domain-new.png" in content["files"]
-        assert "data/img/domain.png" not in content["files"]
-        assert site_temp.storage.isFile("data/img/domain-new.png")
-        assert not site_temp.storage.isFile("data/img/domain.png")
+        # Check files
+        bad_files = site_temp.storage.verifyFiles(quick_check=True)
 
-        assert site_temp.settings["size"] == settings_before["size"]
-        assert site_temp.settings["size_optional"] == settings_before["size_optional"]
+        # -1 because data/users/1J6... user has invalid cert
+        assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1
+        assert not bad_files
 
         assert site_temp.storage.deleteFiles()
         [connection.close() for connection in file_server.connections]
 
-    def testRenameOptional(self, file_server, site, site_temp):
-        assert site.storage.directory == config.data_dir + "/" + site.address
-        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
-
-        # Init source server
-        site.connection_server = file_server
-        file_server.sites[site.address] = site
-
-        # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
-        site_temp.connection_server = client
-        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
-
-
-        site_temp.addPeer(file_server.ip, 1544)
-
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
-        assert site_temp.settings["optional_downloaded"] == 0
-
-        site_temp.needFile("data/optional.txt")
-
-        assert site_temp.settings["optional_downloaded"] > 0
-        settings_before = site_temp.settings
-        hashfield_before = site_temp.content_manager.hashfield.tobytes()
-
-        # Rename optional file
-        os.rename(site.storage.getPath("data/optional.txt"), site.storage.getPath("data/optional-new.txt"))
-
-        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", remove_missing_optional=True)
-
-        content = site.storage.loadJson("content.json")
-        assert "data/optional-new.txt" in content["files_optional"]
-        assert "data/optional.txt" not in content["files_optional"]
-        assert not site_temp.storage.isFile("data/optional-new.txt")
-        assert site_temp.storage.isFile("data/optional.txt")
-
-        with Spy.Spy(FileRequest, "route") as requests:
-            site.publish()
-            time.sleep(0.1)
-            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
-            assert "streamFile" not in [req[1] for req in requests]
-
-        content = site_temp.storage.loadJson("content.json")
-        assert "data/optional-new.txt" in content["files_optional"]
-        assert "data/optional.txt" not in content["files_optional"]
-        assert site_temp.storage.isFile("data/optional-new.txt")
-        assert not site_temp.storage.isFile("data/optional.txt")
-
-        assert site_temp.settings["size"] == settings_before["size"]
-        assert site_temp.settings["size_optional"] == settings_before["size_optional"]
-        assert site_temp.settings["optional_downloaded"] == settings_before["optional_downloaded"]
-        assert site_temp.content_manager.hashfield.tobytes() == hashfield_before
-
-        assert site_temp.storage.deleteFiles()
-        [connection.close() for connection in file_server.connections]
-
     def testArchivedDownload(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
+
         # Init source server
         site.connection_server = file_server
         file_server.sites[site.address] = site
 
         # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
+        client = FileServer("127.0.0.1", 1545)
+        client.sites[site_temp.address] = site_temp
         site_temp.connection_server = client
 
         # Download normally
-        site_temp.addPeer(file_server.ip, 1544)
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-        bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
+        site_temp.addPeer("127.0.0.1", 1544)
+        site_temp.download(blind_includes=True).join(timeout=5)
+        bad_files = site_temp.storage.verifyFiles(quick_check=True)
 
         assert not bad_files
         assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
@@ -148,7 +80,7 @@ class TestSiteDownload:
         assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
 
         # Add archived data
-        assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
+        assert not "archived" in site.content_manager.contents["data/users/content.json"]["user_contents"]
         assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
 
         site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
@@ -162,8 +94,7 @@ class TestSiteDownload:
         # Push archived update
         assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
         site.publish()
-        time.sleep(0.1)
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
+        site_temp.download(blind_includes=True).join(timeout=5)  # Wait for download
 
         # The archived content should disappear from remote client
         assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
@@ -175,71 +106,23 @@ class TestSiteDownload:
         assert site_temp.storage.deleteFiles()
         [connection.close() for connection in file_server.connections]
 
-    def testArchivedBeforeDownload(self, file_server, site, site_temp):
-        # Init source server
-        site.connection_server = file_server
-        file_server.sites[site.address] = site
-
-        # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
-        site_temp.connection_server = client
-
-        # Download normally
-        site_temp.addPeer(file_server.ip, 1544)
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-        bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"]
-
-        assert not bad_files
-        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
-        assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
-        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2
-
-        # Add archived data
-        assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"]
-        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1)
-
-        content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"]
-        site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time
-        site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
-        date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"]
-        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1)
-        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
-        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1)  # Allow user to update archived data later
-
-        # Push archived update
-        assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
-        site.publish()
-        time.sleep(0.1)
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
-
-        # The archived content should disappear from remote client
-        assert "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
-        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
-        assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
-        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
-        assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0
-
-        assert site_temp.storage.deleteFiles()
-        [connection.close() for connection in file_server.connections]
-
-
     # Test when connected peer has the optional file
     def testOptionalDownload(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
+
         # Init source server
         site.connection_server = file_server
         file_server.sites[site.address] = site
 
         # Init client server
-        client = ConnectionServer(file_server.ip, 1545)
+        client = ConnectionServer("127.0.0.1", 1545)
         site_temp.connection_server = client
         site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
 
-        site_temp.addPeer(file_server.ip, 1544)
+        site_temp.addPeer("127.0.0.1", 1544)
 
         # Download site
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)
 
         # Download optional data/optional.txt
         site.storage.verifyFiles(quick_check=True)  # Find what optional files we have
@@ -269,24 +152,21 @@
 
     # Test when connected peer does not has the file, so ask him if he know someone who has it
     def testFindOptional(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
+
         # Init source server
         site.connection_server = file_server
         file_server.sites[site.address] = site
 
         # Init full source server (has optional files)
         site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
-        file_server_full = FileServer(file_server.ip, 1546)
+        file_server_full = FileServer("127.0.0.1", 1546)
         site_full.connection_server = file_server_full
-
-        def listen():
-            ConnectionServer.start(file_server_full)
-            ConnectionServer.listen(file_server_full)
-
-        gevent.spawn(listen)
+        gevent.spawn(lambda: ConnectionServer.start(file_server_full))
        time.sleep(0.001)  # Port opening
         file_server_full.sites[site_full.address] = site_full  # Add site
         site_full.storage.verifyFiles(quick_check=True)  # Check optional files
-        site_full_peer = site.addPeer(file_server.ip, 1546)  # Add it to source server
+        site_full_peer = site.addPeer("127.0.0.1", 1546)  # Add it to source server
         hashfield = site_full_peer.updateHashfield()  # Update hashfield
         assert len(site_full.content_manager.hashfield) == 8
         assert hashfield
@@ -299,12 +179,12 @@
             site.content_manager.hashfield.remove(hash)
 
         # Init client server
-        site_temp.connection_server = ConnectionServer(file_server.ip, 1545)
-        site_temp.addPeer(file_server.ip, 1544)  # Add source server
+        site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
+        site_temp.addPeer("127.0.0.1", 1544)  # Add source server
 
         # Download normal files
         site_temp.log.info("Start Downloading site")
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
+        site_temp.download(blind_includes=True).join(timeout=5)
 
         # Download optional data/optional.txt
         optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
@@ -326,7 +206,7 @@
             threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
             gevent.joinall(threads)
 
-            assert len([request for request in requests if request[1] == "findHashIds"]) == 1  # findHashids should call only once
+            assert len([request for request in requests if request[0] == "findHashIds"]) == 1  # findHashids should call only once
 
         assert site_temp.storage.isFile("data/optional.txt")
         assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
@@ -334,9 +214,10 @@
         assert site_temp.storage.deleteFiles()
         file_server_full.stop()
         [connection.close() for connection in file_server.connections]
-        site_full.content_manager.contents.db.close("FindOptional test end")
 
     def testUpdate(self, file_server, site, site_temp):
+        file_server.ip_incoming = {}  # Reset flood protection
+
         assert site.storage.directory == config.data_dir + "/" + site.address
         assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
 
@@ -345,8 +226,8 @@
         file_server.sites[site.address] = site
 
         # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
+        client = FileServer("127.0.0.1", 1545)
+        client.sites[site_temp.address] = site_temp
         site_temp.connection_server = client
 
         # Don't try to find peers from the net
@@ -354,15 +235,14 @@
         site_temp.announce = mock.MagicMock(return_value=True)
 
         # Connect peers
-        site_temp.addPeer(file_server.ip, 1544)
+        site_temp.addPeer("127.0.0.1", 1544)
 
         # Download site from site to site_temp
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-        assert len(site_temp.bad_files) == 1
+        site_temp.download(blind_includes=True).join(timeout=5)
 
         # Update file
         data_original = site.storage.open("data/data.json").read()
-        data_new = data_original.replace(b'"ZeroBlog"', b'"UpdatedZeroBlog"')
+        data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"')
         assert data_original != data_new
 
         site.storage.open("data/data.json", "wb").write(data_new)
@@ -376,20 +256,19 @@
             site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
             site.publish()
             time.sleep(0.1)
-            site.log.info("Downloading site")
-            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-            assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1
+            site_temp.download(blind_includes=True).join(timeout=5)
+            assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 1
 
         assert site_temp.storage.open("data/data.json").read() == data_new
 
         # Close connection to avoid update spam limit
-        list(site.peers.values())[0].remove()
-        site.addPeer(file_server.ip, 1545)
-        list(site_temp.peers.values())[0].ping()  # Connect back
+        site.peers.values()[0].remove()
+        site.addPeer("127.0.0.1", 1545)
+        site_temp.peers.values()[0].ping()  # Connect back
         time.sleep(0.1)
 
         # Update with patch
-        data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
+        data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"')
         assert data_original != data_new
 
         site.storage.open("data/data.json-new", "wb").write(data_new)
 
@@ -402,161 +281,17 @@
         assert not site.storage.isFile("data/data.json-new")  # New data file removed
         assert site.storage.open("data/data.json").read() == data_new  # -new postfix removed
         assert "data/data.json" in diffs
-        assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', [b'\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
+        assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
 
         # Publish with patch
         site.log.info("Publish new data.json with patch")
         with Spy.Spy(FileRequest, "route") as requests:
             site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
-            event_done = gevent.event.AsyncResult()
             site.publish(diffs=diffs)
-            time.sleep(0.1)
-            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-            assert [request for request in requests if request[1] in ("getFile", "streamFile")] == []
+            site_temp.download(blind_includes=True).join(timeout=5)
+            assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 0
 
         assert site_temp.storage.open("data/data.json").read() == data_new
 
         assert site_temp.storage.deleteFiles()
         [connection.close() for connection in file_server.connections]
-
-    def testBigUpdate(self, file_server, site, site_temp):
-        # Init source server
-        site.connection_server = file_server
-        file_server.sites[site.address] = site
-
-        # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
-        site_temp.connection_server = client
-
-        # Connect peers
-        site_temp.addPeer(file_server.ip, 1544)
-
-        # Download site from site to site_temp
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-        assert list(site_temp.bad_files.keys()) == ["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"]
-
-        # Update file
-        data_original = site.storage.open("data/data.json").read()
-        data_new = data_original.replace(b'"ZeroBlog"', b'"PatchedZeroBlog"')
-        assert data_original != data_new
-
-        site.storage.open("data/data.json-new", "wb").write(data_new)
-
-        assert site.storage.open("data/data.json-new").read() == data_new
-        assert site_temp.storage.open("data/data.json").read() != data_new
-
-        # Generate diff
-        diffs = site.content_manager.getDiffs("content.json")
-        assert not site.storage.isFile("data/data.json-new")  # New data file removed
-        assert site.storage.open("data/data.json").read() == data_new  # -new postfix removed
-        assert "data/data.json" in diffs
-
-        content_json = site.storage.loadJson("content.json")
-        content_json["description"] = "BigZeroBlog" * 1024 * 10
-        site.storage.writeJson("content.json", content_json)
-        site.content_manager.loadContent("content.json", force=True)
-
-        # Publish with patch
-        site.log.info("Publish new data.json with patch")
-        with Spy.Spy(FileRequest, "route") as requests:
-            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-            assert site.storage.getSize("content.json") > 10 * 1024  # Make it a big content.json
-            site.publish(diffs=diffs)
-            time.sleep(0.1)
-            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-            file_requests = [request for request in requests if request[1] in ("getFile", "streamFile")]
-            assert len(file_requests) == 1
-
-        assert site_temp.storage.open("data/data.json").read() == data_new
-        assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
-
-    # Test what happened if the content.json of the site is bigger than the site limit
-    def testHugeContentSiteUpdate(self, file_server, site, site_temp):
-        # Init source server
-        site.connection_server = file_server
-        file_server.sites[site.address] = site
-
-        # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
-        site_temp.connection_server = client
-
-        # Connect peers
-        site_temp.addPeer(file_server.ip, 1544)
-
-        # Download site from site to site_temp
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-        site_temp.settings["size_limit"] = int(20 * 1024 *1024)
-        site_temp.saveSettings()
-
-        # Raise limit size to 20MB on site so it can be signed
-        site.settings["size_limit"] = int(20 * 1024 *1024)
-        site.saveSettings()
-
-        content_json = site.storage.loadJson("content.json")
-        content_json["description"] = "PartirUnJour" * 1024 * 1024
-        site.storage.writeJson("content.json", content_json)
-        changed, deleted = site.content_manager.loadContent("content.json", force=True)
-
-        # Make sure we have 2 differents content.json
-        assert site_temp.storage.open("content.json").read() != site.storage.open("content.json").read()
-
-        # Generate diff
-        diffs = site.content_manager.getDiffs("content.json")
-
-        # Publish with patch
-        site.log.info("Publish new content.json bigger than 10MB")
-        with Spy.Spy(FileRequest, "route") as requests:
-            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-            assert site.storage.getSize("content.json") > 10 * 1024 * 1024  # verify it over 10MB
-            time.sleep(0.1)
-            site.publish(diffs=diffs)
-            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
-        assert site_temp.storage.getSize("content.json") < site_temp.getSizeLimit() * 1024 * 1024
-        assert site_temp.storage.open("content.json").read() == site.storage.open("content.json").read()
-
-    def testUnicodeFilename(self, file_server, site, site_temp):
-        assert site.storage.directory == config.data_dir + "/" + site.address
-        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address
-
-        # Init source server
-        site.connection_server = file_server
-        file_server.sites[site.address] = site
-
-        # Init client server
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
-        site_temp.connection_server = client
-        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net
-
-        site_temp.addPeer(file_server.ip, 1544)
-
-        assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)
-
-        site.storage.write("data/img/árvíztűrő.png", b"test")
-
-        site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
-
-        content = site.storage.loadJson("content.json")
-        assert "data/img/árvíztűrő.png" in content["files"]
-        assert not site_temp.storage.isFile("data/img/árvíztűrő.png")
-        settings_before = site_temp.settings
-
-        with Spy.Spy(FileRequest, "route") as requests:
-            site.publish()
-            time.sleep(0.1)
-            assert site_temp.download(blind_includes=True, retry_bad_files=False).get(timeout=10)  # Wait for download
-            assert len([req[1] for req in requests if req[1] == "streamFile"]) == 1
-
-        content = site_temp.storage.loadJson("content.json")
-        assert "data/img/árvíztűrő.png" in content["files"]
-        assert site_temp.storage.isFile("data/img/árvíztűrő.png")
-
-        assert site_temp.settings["size"] == settings_before["size"]
-        assert site_temp.settings["size_optional"] == settings_before["size_optional"]
-
-        assert site_temp.storage.deleteFiles()
-        [connection.close() for connection in file_server.connections]
diff --git a/src/Test/TestSiteStorage.py b/src/Test/TestSiteStorage.py
index f11262bf..d21405fb 100644
--- a/src/Test/TestSiteStorage.py
+++ b/src/Test/TestSiteStorage.py
@@ -19,7 +19,4 @@ class TestSiteStorage:
         assert "css/all.css" not in list_root
 
         # Subdir
-        assert set(site.storage.list("data-default")) == set(["data.json", "users"])
-
-    def testDbRebuild(self, site):
-        assert site.storage.rebuildDb()
+        assert list(site.storage.list("data-default")) == ["data.json", "users"]
diff --git a/src/Test/TestThreadPool.py b/src/Test/TestThreadPool.py
deleted file mode 100644
index 5e95005e..00000000
--- a/src/Test/TestThreadPool.py
+++ /dev/null
@@ -1,163 +0,0 @@
-import time
-import threading
-
-import gevent
-import pytest
-
-from util import ThreadPool
-
-
-class TestThreadPool:
-    def testExecutionOrder(self):
-        with ThreadPool.ThreadPool(4) as pool:
-            events = []
-
-            @pool.wrap
-            def blocker():
-                events.append("S")
-                out = 0
-                for i in range(10000000):
-                    if i == 3000000:
-                        events.append("M")
-                    out += 1
-                events.append("D")
-                return out
-
-            threads = []
-            for i in range(3):
-                threads.append(gevent.spawn(blocker))
-            gevent.joinall(threads)
-
-            assert events == ["S"] * 3 + ["M"] * 3 + ["D"] * 3
-
-            res = blocker()
-            assert res == 10000000
-
-    def testLockBlockingSameThread(self):
-        lock = ThreadPool.Lock()
-
-        s = time.time()
-
-        def unlocker():
-            time.sleep(1)
-            lock.release()
-
-        gevent.spawn(unlocker)
-        lock.acquire(True)
-        lock.acquire(True, timeout=2)
-
-        unlock_taken = time.time() - s
-
-        assert 1.0 < unlock_taken < 1.5
-
-    def testLockBlockingDifferentThread(self):
-        lock = ThreadPool.Lock()
-
-        def locker():
-            lock.acquire(True)
-            time.sleep(0.5)
-            lock.release()
-
-        with ThreadPool.ThreadPool(10) as pool:
-            threads = [
-                pool.spawn(locker),
-                pool.spawn(locker),
-                gevent.spawn(locker),
-                pool.spawn(locker)
-            ]
-            time.sleep(0.1)
-
-            s = time.time()
-
-            lock.acquire(True, 5.0)
-
-            unlock_taken = time.time() - s
-
-            assert 1.8 < unlock_taken < 2.2
-
-            gevent.joinall(threads)
-
-    def testMainLoopCallerThreadId(self):
-        main_thread_id = threading.current_thread().ident
-        with ThreadPool.ThreadPool(5) as pool:
-            def getThreadId(*args, **kwargs):
-                return threading.current_thread().ident
-
-            t = pool.spawn(getThreadId)
-            assert t.get() != main_thread_id
-
-            t = pool.spawn(lambda: ThreadPool.main_loop.call(getThreadId))
-            assert t.get() == main_thread_id
-
-    def testMainLoopCallerGeventSpawn(self):
-        main_thread_id = threading.current_thread().ident
-        with ThreadPool.ThreadPool(5) as pool:
-            def waiter():
-                time.sleep(1)
-                return threading.current_thread().ident
-
-            def geventSpawner():
-                event = ThreadPool.main_loop.call(gevent.spawn, waiter)
-
-                with pytest.raises(Exception) as greenlet_err:
-                    event.get()
-                assert str(greenlet_err.value) == "cannot switch to a different thread"
-
-                waiter_thread_id = ThreadPool.main_loop.call(event.get)
-                return waiter_thread_id
-
-            s = time.time()
-            waiter_thread_id = pool.apply(geventSpawner)
-            assert main_thread_id == waiter_thread_id
-            time_taken = time.time() - s
-            assert 0.9 < time_taken < 1.2
-
-    def testEvent(self):
-        with ThreadPool.ThreadPool(5) as pool:
-            event = ThreadPool.Event()
-
-            def setter():
-                time.sleep(1)
-                event.set("done!")
-
-            def getter():
-                return event.get()
-
-            pool.spawn(setter)
-            t_gevent = gevent.spawn(getter)
-            t_pool = pool.spawn(getter)
-            s = time.time()
-            assert event.get() == "done!"
-            time_taken = time.time() - s
-            gevent.joinall([t_gevent, t_pool])
-
-            assert t_gevent.get() == "done!"
-            assert t_pool.get() == "done!"
-
-            assert 0.9 < time_taken < 1.2
-
-            with pytest.raises(Exception) as err:
-                event.set("another result")
-
-            assert "Event already has value" in str(err.value)
-
-    def testMemoryLeak(self):
-        import gc
-        thread_objs_before = [id(obj) for obj in gc.get_objects() if "threadpool" in str(type(obj))]
-
-        def worker():
-            time.sleep(0.1)
-            return "ok"
-
-        def poolTest():
-            with ThreadPool.ThreadPool(5) as pool:
-                for i in range(20):
-                    pool.spawn(worker)
-
-        for i in range(5):
-            poolTest()
-            new_thread_objs = [obj for obj in gc.get_objects() if "threadpool" in str(type(obj)) and id(obj) not in thread_objs_before]
-            #print("New objs:", new_thread_objs, "run:", num_run)
-
-        # Make sure no threadpool object left behind
-        assert not new_thread_objs
diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py
index e6b82c1a..ec154f99 100644
--- a/src/Test/TestTor.py
+++ b/src/Test/TestTor.py
@@ -1,11 +1,8 @@
+import pytest
 import time
 
-import pytest
-import mock
-
 from File import FileServer
-from Crypt import CryptTor
-from Config import config
+from Crypt import CryptRsa
 
 @pytest.mark.usefixtures("resetSettings")
 @pytest.mark.usefixtures("resetTempSettings")
@@ -34,49 +31,49 @@ class TestTor:
         address = tor_manager.addOnion()
 
         # Sign
-        sign = CryptTor.sign(b"hello", tor_manager.getPrivatekey(address))
+        sign = CryptRsa.sign("hello", tor_manager.getPrivatekey(address))
         assert len(sign) == 128
 
         # Verify
-        publickey = CryptTor.privatekeyToPublickey(tor_manager.getPrivatekey(address))
+        publickey = CryptRsa.privatekeyToPublickey(tor_manager.getPrivatekey(address))
         assert len(publickey) == 140
-        assert CryptTor.verify(b"hello", publickey, sign)
-        assert not CryptTor.verify(b"not hello", publickey, sign)
+        assert CryptRsa.verify("hello", publickey, sign)
+        assert not CryptRsa.verify("not hello", publickey, sign)
 
         # Pub to address
-        assert CryptTor.publickeyToOnion(publickey) == address
+        assert CryptRsa.publickeyToOnion(publickey) == address
 
         # Delete
         tor_manager.delOnion(address)
 
-    @pytest.mark.slow
+    @pytest.mark.skipif(not pytest.config.getvalue("slow"), reason="--slow not requested (takes around ~ 1min)")
     def testConnection(self, tor_manager, file_server, site, site_temp):
         file_server.tor_manager.start_onions = True
         address = file_server.tor_manager.getOnion(site.address)
         assert address
-        print("Connecting to", address)
+        print "Connecting to", address
         for retry in range(5):  # Wait for hidden service creation
             time.sleep(10)
             try:
-                connection = file_server.getConnection(address + ".onion", 1544)
+                connection = file_server.getConnection(address+".onion", 1544)
                 if connection:
                     break
-            except Exception as err:
+            except Exception, err:
                 continue
         assert connection.handshake
         assert not connection.handshake["peer_id"]  # No peer_id for Tor connections
 
         # Return the same connection without site specified
-        assert file_server.getConnection(address + ".onion", 1544) == connection
+        assert file_server.getConnection(address+".onion", 1544) == connection
         # No reuse for different site
-        assert file_server.getConnection(address + ".onion", 1544, site=site) != connection
-        assert file_server.getConnection(address + ".onion", 1544, site=site) == file_server.getConnection(address + ".onion", 1544, site=site)
+        assert file_server.getConnection(address+".onion", 1544, site=site) != connection
+        assert file_server.getConnection(address+".onion", 1544, site=site) == file_server.getConnection(address+".onion", 1544, site=site)
         site_temp.address = "1OTHERSITE"
-        assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp)
+        assert file_server.getConnection(address+".onion", 1544, site=site) != file_server.getConnection(address+".onion", 1544, site=site_temp)
 
         # Only allow to query from the locked site
         file_server.sites[site.address] = site
-        connection_locked = file_server.getConnection(address + ".onion", 1544, site=site)
+        connection_locked = file_server.getConnection(address+".onion", 1544, site=site)
         assert "body" in connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
         assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site"
 
@@ -85,30 +82,24 @@ class TestTor:
         site.connection_server = file_server
         file_server.sites[site.address] = site
         # Create a new file server to emulate new peer connecting to our peer
-        file_server_temp = FileServer(file_server.ip, 1545)
+        file_server_temp = FileServer("127.0.0.1", 1545)
         site_temp.connection_server = file_server_temp
         file_server_temp.sites[site_temp.address] = site_temp
-
         # We will request peers from this
-        peer_source = site_temp.addPeer(file_server.ip, 1544)
+        peer_source = site_temp.addPeer("127.0.0.1", 1544)
 
         # Get ip4 peers from source site
+        assert peer_source.pex(need_num=10) == 1  # Need >5 to return also return non-connected peers
+        assert len(site_temp.peers) == 2  # Me, and the other peer
         site.addPeer("1.2.3.4", 1555)  # Add peer to source site
         assert peer_source.pex(need_num=10) == 1
-        assert len(site_temp.peers) == 2
+        assert len(site_temp.peers) == 3
         assert "1.2.3.4:1555" in site_temp.peers
 
         # Get onion peers from source site
         site.addPeer("bka4ht2bzxchy44r.onion", 1555)
         assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers
-
-        # Don't add onion peers if not supported
-        assert "onion" not in file_server_temp.supported_ip_types
-        assert peer_source.pex(need_num=10) == 0
-
-        file_server_temp.supported_ip_types.append("onion")
-        assert peer_source.pex(need_num=10) == 1
-
+        assert peer_source.pex(need_num=10) == 1  # Need >5 to return also return non-connected peers
         assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers
 
     def testFindHash(self, tor_manager, file_server, site, site_temp):
@@ -116,12 +107,12 @@ class TestTor:
         file_server.sites[site.address] = site
         file_server.tor_manager = tor_manager
 
-        client = FileServer(file_server.ip, 1545)
-        client.sites = {site_temp.address: site_temp}
+        client = FileServer("127.0.0.1", 1545)
+        client.sites[site_temp.address] = site_temp
         site_temp.connection_server = client
 
         # Add file_server as peer to client
-        peer_file_server = site_temp.addPeer(file_server.ip, 1544)
+        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)
 
         assert peer_file_server.findHashIds([1234]) == {}
 
@@ -135,19 +126,19 @@ class TestTor:
         fake_peer_3.hashfield.append(1235)
         fake_peer_3.hashfield.append(1236)
 
-        res = peer_file_server.findHashIds([1234, 1235])
-
-        assert sorted(res[1234]) == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)]
-        assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
+        assert peer_file_server.findHashIds([1234, 1235]) == {
+            1234: [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544)],
+            1235: [('1.2.3.6', 1546), ('1.2.3.5', 1545)]
+        }
 
         # Test my address adding
         site.content_manager.hashfield.append(1234)
+        my_onion_address = tor_manager.getOnion(site_temp.address)+".onion"
 
         res = peer_file_server.findHashIds([1234, 1235])
-        assert sorted(res[1234]) == [('1.2.3.5', 1545), (file_server.ip, 1544), ("bka4ht2bzxchy44r.onion", 1544)]
-        assert sorted(res[1235]) == [('1.2.3.5', 1545), ('1.2.3.6', 1546)]
+        assert res[1234] == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544), (my_onion_address, 1544)]
+        assert res[1235] == [('1.2.3.6', 1546), ('1.2.3.5', 1545)]
 
     def testSiteOnion(self, tor_manager):
-        with mock.patch.object(config, "tor", "always"):
-            assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2")
-            assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1")
+        assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2")
+        assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1")
diff --git a/src/Test/TestTranslate.py b/src/Test/TestTranslate.py
index 348a65a6..b4c2ae2d 100644
--- a/src/Test/TestTranslate.py
+++ b/src/Test/TestTranslate.py
@@ -1,3 +1,5 @@
+import os
+
 from Translate import Translate
 
 class TestTranslate:
@@ -11,6 +13,7 @@ class TestTranslate:
         assert 'translated = _("translated")' in data_translated
         assert 'not_translated = "original"' in data_translated
 
+
     def testTranslateStrictNamed(self):
         translate = Translate()
         data = """
@@ -21,41 +24,3 @@ class TestTranslate:
         data_translated = translate.translateData(data, {"_(original, original named)": "translated"})
         assert 'translated = _("translated")' in data_translated
         assert 'not_translated = "original"' in data_translated
-
-    def testTranslateUtf8(self):
-        translate = Translate()
-        data = """
-            greeting = "Hi again árvztűrőtökörfúrógép!"
-        """
-        data_translated = translate.translateData(data, {"Hi again árvztűrőtökörfúrógép!": "Üdv újra árvztűrőtökörfúrógép!"})
-        assert data_translated == """
-            greeting = "Üdv újra árvztűrőtökörfúrógép!"
-        """
-
-    def testTranslateEscape(self):
-        _ = Translate()
-        _["Hello"] = "Szia"
-
-        # Simple escaping
-        data = "{_[Hello]} {username}!"
-        username = "Hacker<script>alert('boom')</script>"
-        data_translated = _(data)
-        assert 'Szia' in data_translated
-        assert '<' not in data_translated
-        assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
-
-        # Escaping dicts
-        user = {"username": "Hacker<script>alert('boom')</script>"}
-        data = "{_[Hello]} {user[username]}!"
-        data_translated = _(data)
-        assert 'Szia' in data_translated
-        assert '<' not in data_translated
-        assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
-
-        # Escaping lists
-        users = [{"username": "Hacker<script>alert('boom')</script>"}]
-        data = "{_[Hello]} {users[0][username]}!"
-        data_translated = _(data)
-        assert 'Szia' in data_translated
-        assert '<' not in data_translated
-        assert data_translated == "Szia Hacker&lt;script&gt;alert(&#x27;boom&#x27;)&lt;/script&gt;!"
diff --git a/src/Test/TestUiWebsocket.py b/src/Test/TestUiWebsocket.py
deleted file mode 100644
index d2d23d03..00000000
--- a/src/Test/TestUiWebsocket.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import sys
-import pytest
-
-@pytest.mark.usefixtures("resetSettings")
-class TestUiWebsocket:
-    def testPermission(self, ui_websocket):
-        res = ui_websocket.testAction("ping")
-        assert res == "pong"
-
-        res = ui_websocket.testAction("certList")
-        assert "You don't have permission" in res["error"]
diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py
index f17c77bd..f77d7f8d 100644
--- a/src/Test/TestUpnpPunch.py
+++ b/src/Test/TestUpnpPunch.py
@@ -1,5 +1,5 @@
 import socket
-from urllib.parse import urlparse
+from urlparse import urlparse
 
 import pytest
 import mock
@@ -10,7 +10,7 @@ from util import UpnpPunch as upnp
 @pytest.fixture
 def mock_socket():
     mock_socket = mock.MagicMock()
-    mock_socket.recv = mock.MagicMock(return_value=b'Hello')
+    mock_socket.recv = mock.MagicMock(return_value='Hello')
     mock_socket.bind = mock.MagicMock()
     mock_socket.send_to = mock.MagicMock()
 
@@ -79,12 +79,12 @@ class TestUpnpPunch(object):
             upnp._retrieve_location_from_ssdp(rsp)
 
     def test_retrieve_igd_profile(self, url_obj):
-        with mock.patch('urllib.request.urlopen') as mock_urlopen:
+        with mock.patch('urllib2.urlopen') as mock_urlopen:
             upnp._retrieve_igd_profile(url_obj)
             mock_urlopen.assert_called_with(url_obj.geturl(), timeout=5)
 
     def test_retrieve_igd_profile_timeout(self, url_obj):
-        with mock.patch('urllib.request.urlopen') as mock_urlopen:
+        with mock.patch('urllib2.urlopen') as mock_urlopen:
             mock_urlopen.side_effect = socket.error('Timeout error')
             with pytest.raises(upnp.IGDError):
                 upnp._retrieve_igd_profile(url_obj)
@@ -126,9 +126,9 @@ class TestUpnpPunch(object):
 
     def test_parse_for_errors_bad_rsp(self, httplib_response):
         rsp = httplib_response(status=500)
-        with pytest.raises(upnp.IGDError) as err:
+        with pytest.raises(upnp.IGDError) as exc:
             upnp._parse_for_errors(rsp)
-        assert 'Unable to parse' in str(err.value)
+        assert 'Unable to parse' in exc.value.message
 
     def test_parse_for_errors_error(self, httplib_response):
         soap_error = ('<document>'
                       '<errorCode>500</errorCode>'
                       '<errorDescription>Bad request</errorDescription>'
                       '</document>')
         rsp = httplib_response(status=500, body=soap_error)
-        with pytest.raises(upnp.IGDError) as err:
+        with pytest.raises(upnp.IGDError) as exc:
             upnp._parse_for_errors(rsp)
-        assert 'SOAP request error' in str(err.value)
+        assert 'SOAP request error' in exc.value.message
 
     def test_parse_for_errors_good_rsp(self, httplib_response):
         rsp = httplib_response(status=200)
@@ -176,7 +176,7 @@ class TestUpnpPunch(object):
 
         soap_msg = mock_send_requests.call_args[0][0][0][0]
 
-        assert result is True
+        assert result is None
         assert mock_collect_idg.called
         assert '192.168.0.12' in soap_msg
diff --git a/src/Test/TestUser.py b/src/Test/TestUser.py
index e5ec5c8c..1fcdd1b7 100644
--- a/src/Test/TestUser.py
+++ b/src/Test/TestUser.py
@@ -7,7 +7,7 @@ from Crypt import CryptBitcoin
 class TestUser:
     def testAddress(self, user):
         assert user.master_address == "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc"
-        address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811
+        address_index = 1458664252141532163166741013621928587528255888800826689784628722366466547364755811L
         assert user.getAddressAuthIndex("15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc") == address_index
 
         # Re-generate privatekey based on address_index
diff --git a/src/Test/TestWeb.py b/src/Test/TestWeb.py
index 2ce66c98..8cbce1cf 100644
--- a/src/Test/TestWeb.py
+++ b/src/Test/TestWeb.py
@@ -1,10 +1,10 @@
-import urllib.request
+import urllib
 
 import pytest
 
 try:
     from selenium.webdriver.support.ui import WebDriverWait
-    from selenium.webdriver.support.expected_conditions import staleness_of, title_is
+    from selenium.webdriver.support.expected_conditions import staleness_of
     from selenium.common.exceptions import NoSuchElementException
 except:
     pass
@@ -18,15 +18,11 @@ class WaitForPageLoad(object):
         self.old_page = self.browser.find_element_by_tag_name('html')
 
     def __exit__(self, *args):
-        WebDriverWait(self.browser, 10).until(staleness_of(self.old_page))
+        WebDriverWait(self.browser, 5).until(staleness_of(self.old_page))
 
-def getContextUrl(browser):
-    return browser.execute_script("return window.location.toString()")
-
-
-def getUrl(url):
-    content = urllib.request.urlopen(url).read()
+def wget(url):
+    content = urllib.urlopen(url).read()
     assert "server error" not in content.lower(), "Got a server error! " + repr(url)
     return content
 
@@ -34,45 +30,35 @@ def getUrl(url):
 @pytest.mark.webtest
 class TestWeb:
     def testFileSecurity(self, site_url):
-        assert "Not Found" in getUrl("%s/media/sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/media/./sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/media/../config.py" % site_url)
-        assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
+        assert "Not Found" in wget("%s/media/./sites.json" % site_url)
+        assert "Forbidden" in wget("%s/media/../config.py" % site_url)
+        assert "Forbidden" in wget("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
+        assert "Forbidden" in wget("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
+        assert "Forbidden" in wget("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
+        assert "Forbidden" in wget("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
+        assert "Forbidden" in wget("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
+        assert "Forbidden" in wget("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
 
-        assert "Not Found" in getUrl("%s/raw/sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/raw/./sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/raw/../config.py" % site_url)
-        assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
-
-        assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url)
-        assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url)
-
-        assert "Forbidden" in getUrl("%s/content.db" % site_url)
-        assert "Forbidden" in getUrl("%s/./users.json" % site_url)
-        assert "Forbidden" in getUrl("%s/./key-rsa.pem" % site_url)
-        assert "Forbidden" in getUrl("%s/././././././././././//////sites.json" % site_url)
+        assert "Forbidden" in wget("%s/content.db" % site_url)
+        assert "Forbidden" in wget("%s/./users.json" % site_url)
+        assert "Forbidden" in wget("%s/./key-rsa.pem" % site_url)
wget("%s/./key-rsa.pem" % site_url) + assert "Forbidden" in wget("%s/././././././././././//////sites.json" % site_url) def testLinkSecurity(self, browser, site_url): browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url) - WebDriverWait(browser, 10).until(title_is("ZeroHello - ZeroNet")) - assert getContextUrl(browser) == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url + assert browser.title == "ZeroHello - ZeroNet" + assert browser.current_url == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url # Switch to inner frame browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) - assert "wrapper_nonce" in getContextUrl(browser) - assert browser.find_element_by_id("script_output").text == "Result: Works" + assert "wrapper_nonce" in browser.current_url browser.switch_to.default_content() # Clicking on links without target browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) with WaitForPageLoad(browser): browser.find_element_by_id("link_to_current").click() - assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content + assert "wrapper_nonce" not in browser.current_url # The browser object back to default content assert "Forbidden" not in browser.page_source # Check if we have frame inside frame browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) @@ -84,22 +70,15 @@ class TestWeb: browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) with WaitForPageLoad(browser): browser.find_element_by_id("link_to_top").click() - assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content + assert "wrapper_nonce" not in browser.current_url # The browser object back to default content assert "Forbidden" not in browser.page_source browser.switch_to.default_content() # Try to escape from inner_frame browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) - assert "wrapper_nonce" in getContextUrl(browser) # Make sure we are inside of the inner-iframe + assert "wrapper_nonce" in browser.current_url # Make sure we are inside of the inner-iframe with WaitForPageLoad(browser): browser.execute_script("window.top.location = window.location") - assert "wrapper_nonce" in getContextUrl(browser) # We try to use nonce-ed html without iframe - assert " 0.1: - line_marker = "!" 
- elif since_last > 0.02: - line_marker = "*" - elif since_last > 0.01: - line_marker = "-" - else: - line_marker = " " - - since_start = time.time() - time_start - record.since_start = "%s%.3fs" % (line_marker, since_start) - - self.time_last = time.time() - return True - -log = logging.getLogger() -fmt = logging.Formatter(fmt='%(since_start)s %(thread_marker)s %(levelname)-8s %(name)s %(message)s %(thread_title)s') -[hndl.addFilter(TimeFilter()) for hndl in log.handlers] -[hndl.setFormatter(fmt) for hndl in log.handlers] - -from Site.Site import Site +from Site import Site from Site import SiteManager from User import UserManager from File import FileServer from Connection import ConnectionServer from Crypt import CryptConnection -from Crypt import CryptBitcoin from Ui import UiWebsocket from Tor import TorManager from Content import ContentDb from util import RateLimit from Db import Db -from Debug import Debug -gevent.get_hub().NOT_ERROR += (Debug.Notify,) +# SiteManager.site_manager.load = mock.MagicMock(return_value=True) # Don't try to load from sites.json +# SiteManager.site_manager.save = mock.MagicMock(return_value=True) # Don't try to load from sites.json -def cleanup(): - Db.dbCloseAll() - for dir_path in [config.data_dir, config.data_dir + "-temp"]: - if os.path.isdir(dir_path): - for file_name in os.listdir(dir_path): - ext = file_name.rsplit(".", 1)[-1] - if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]: - continue - file_path = dir_path + "/" + file_name - if os.path.isfile(file_path): - os.unlink(file_path) - -atexit_register(cleanup) @pytest.fixture(scope="session") def resetSettings(request): open("%s/sites.json" % config.data_dir, "w").write("{}") - open("%s/filters.json" % config.data_dir, "w").write("{}") open("%s/users.json" % config.data_dir, "w").write(""" { "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": { @@ -161,14 +80,12 @@ def resetSettings(request): } """) - @pytest.fixture(scope="session") def resetTempSettings(request): data_dir_temp = config.data_dir + "-temp" if not os.path.isdir(data_dir_temp): os.mkdir(data_dir_temp) open("%s/sites.json" % data_dir_temp, "w").write("{}") - open("%s/filters.json" % data_dir_temp, "w").write("{}") open("%s/users.json" % data_dir_temp, "w").write(""" { "15E5rhcAUD69WbiYsYARh4YHJ4sLm2JEyc": { @@ -182,7 +99,6 @@ def resetTempSettings(request): def cleanup(): os.unlink("%s/sites.json" % data_dir_temp) os.unlink("%s/users.json" % data_dir_temp) - os.unlink("%s/filters.json" % data_dir_temp) request.addfinalizer(cleanup) @@ -194,33 +110,27 @@ def site(request): RateLimit.called_db = {} site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") + site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net # Always use original data assert "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" in site.storage.getPath("") # Make sure we dont delete everything shutil.rmtree(site.storage.getPath(""), True) shutil.copytree(site.storage.getPath("") + "-original", site.storage.getPath("")) - - # Add to site manager - SiteManager.site_manager.get("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - site.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - def cleanup(): - site.delete() - site.content_manager.contents.db.close("Test cleanup") - site.content_manager.contents.db.timer_check_optional.kill() - SiteManager.site_manager.sites.clear() + site.storage.deleteFiles() + site.content_manager.contents.db.deleteSite(site) + del SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] + 
site.content_manager.contents.db.close() db_path = "%s/content.db" % config.data_dir os.unlink(db_path) del ContentDb.content_dbs[db_path] gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) request.addfinalizer(cleanup) - site.greenlet_manager.stopGreenlets() site = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") # Create new Site object to load content.json files if not SiteManager.site_manager.sites: SiteManager.site_manager.sites = {} SiteManager.site_manager.sites["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] = site - site.settings["serving"] = True return site @@ -229,78 +139,54 @@ def site_temp(request): threads_before = [obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet)] with mock.patch("Config.config.data_dir", config.data_dir + "-temp"): site_temp = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - site_temp.settings["serving"] = True site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net def cleanup(): - site_temp.delete() - site_temp.content_manager.contents.db.close("Test cleanup") - site_temp.content_manager.contents.db.timer_check_optional.kill() + site_temp.storage.deleteFiles() + site_temp.content_manager.contents.db.deleteSite(site_temp) + site_temp.content_manager.contents.db.close() db_path = "%s-temp/content.db" % config.data_dir os.unlink(db_path) del ContentDb.content_dbs[db_path] gevent.killall([obj for obj in gc.get_objects() if isinstance(obj, gevent.Greenlet) and obj not in threads_before]) request.addfinalizer(cleanup) - site_temp.log = logging.getLogger("Temp:%s" % site_temp.address_short) return site_temp @pytest.fixture(scope="session") def user(): user = UserManager.user_manager.get() - if not user: - user = UserManager.user_manager.create() user.sites = {} # Reset user data return user @pytest.fixture(scope="session") -def browser(request): +def browser(): try: from selenium import webdriver - print("Starting chromedriver...") - options = webdriver.chrome.options.Options() - options.add_argument("--headless") - options.add_argument("--window-size=1920x1080") - options.add_argument("--log-level=1") - browser = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, service_log_path=os.path.devnull, options=options) - - def quit(): - browser.quit() - request.addfinalizer(quit) - except Exception as err: - raise pytest.skip("Test requires selenium + chromedriver: %s" % err) + print "Starting phantomjs..." + browser = webdriver.PhantomJS(executable_path=PHANTOMJS_PATH, service_log_path=os.path.devnull) + print "Set window size..." 
+ browser.set_window_size(1400, 1000) + except Exception, err: + raise pytest.skip("Test requires selenium + phantomjs: %s" % err) return browser @pytest.fixture(scope="session") def site_url(): try: - urllib.request.urlopen(SITE_URL).read() - except Exception as err: + urllib.urlopen(SITE_URL).read() + except Exception, err: raise pytest.skip("Test requires zeronet client running: %s" % err) return SITE_URL -@pytest.fixture(params=['ipv4', 'ipv6']) -def file_server(request): - if request.param == "ipv4": - return request.getfixturevalue("file_server4") - else: - return request.getfixturevalue("file_server6") - - @pytest.fixture -def file_server4(request): - time.sleep(0.1) +def file_server(request): + request.addfinalizer(CryptConnection.manager.removeCerts) # Remove cert files after end file_server = FileServer("127.0.0.1", 1544) - file_server.ip_external = "1.2.3.4" # Fake external ip - - def listen(): - ConnectionServer.start(file_server) - ConnectionServer.listen(file_server) - - gevent.spawn(listen) + gevent.spawn(lambda: ConnectionServer.start(file_server)) # Wait for port opening for retry in range(10): time.sleep(0.1) # Port opening @@ -308,10 +194,9 @@ def file_server4(request): conn = file_server.getConnection("127.0.0.1", 1544) conn.close() break - except Exception as err: - print("FileServer6 startup error", Debug.formatException(err)) + except Exception, err: + print err assert file_server.running - file_server.ip_incoming = {} # Reset flood protection def stop(): file_server.stop() @@ -319,68 +204,22 @@ def file_server4(request): return file_server -@pytest.fixture -def file_server6(request): - try: - sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) - sock.connect(("::1", 80, 1, 1)) - has_ipv6 = True - except OSError: - has_ipv6 = False - if not has_ipv6: - pytest.skip("Ipv6 not supported") - - - time.sleep(0.1) - file_server6 = FileServer("::1", 1544) - file_server6.ip_external = 'fca5:95d6:bfde:d902:8951:276e:1111:a22c' # Fake external ip - - def listen(): - ConnectionServer.start(file_server6) - ConnectionServer.listen(file_server6) - - gevent.spawn(listen) - # Wait for port opening - for retry in range(10): - time.sleep(0.1) # Port opening - try: - conn = file_server6.getConnection("::1", 1544) - conn.close() - break - except Exception as err: - print("FileServer6 startup error", Debug.formatException(err)) - assert file_server6.running - file_server6.ip_incoming = {} # Reset flood protection - - def stop(): - file_server6.stop() - request.addfinalizer(stop) - return file_server6 - - @pytest.fixture() -def ui_websocket(site, user): +def ui_websocket(site, file_server, user): class WsMock: def __init__(self): - self.result = gevent.event.AsyncResult() + self.result = None def send(self, data): - logging.debug("WsMock: Set result (data: %s) called by %s" % (data, Debug.formatStack())) - self.result.set(json.loads(data)["result"]) - - def getResult(self): - logging.debug("WsMock: Get result") - back = self.result.get() - logging.debug("WsMock: Got result (data: %s)" % back) - self.result = gevent.event.AsyncResult() - return back + self.result = json.loads(data)["result"] ws_mock = WsMock() - ui_websocket = UiWebsocket(ws_mock, site, None, user, None) + ui_websocket = UiWebsocket(ws_mock, site, file_server, user, None) def testAction(action, *args, **kwargs): - ui_websocket.handleRequest({"id": 0, "cmd": action, "params": list(args) if args else kwargs}) - return ui_websocket.ws.getResult() + func = getattr(ui_websocket, "action%s" % action) + func(0, *args, 
**kwargs) + return ui_websocket.ws.result ui_websocket.testAction = testAction return ui_websocket @@ -389,15 +228,13 @@ def ui_websocket(site, user): @pytest.fixture(scope="session") def tor_manager(): try: - tor_manager = TorManager(fileserver_port=1544) - tor_manager.start() - assert tor_manager.conn is not None + tor_manager = TorManager() + assert tor_manager.connect() tor_manager.startOnions() - except Exception as err: + except Exception, err: raise pytest.skip("Test requires Tor with ControlPort: %s, %s" % (config.tor_controller, err)) return tor_manager - @pytest.fixture() def db(request): db_path = "%s/zeronet.db" % config.data_dir @@ -436,62 +273,12 @@ def db(request): if os.path.isfile(db_path): os.unlink(db_path) - db = Db.Db(schema, db_path) + db = Db(schema, db_path) db.checkTables() def stop(): - db.close("Test db cleanup") + db.close() os.unlink(db_path) request.addfinalizer(stop) return db - - -@pytest.fixture(params=["sslcrypto", "sslcrypto_fallback", "libsecp256k1"]) -def crypt_bitcoin_lib(request, monkeypatch): - monkeypatch.setattr(CryptBitcoin, "lib_verify_best", request.param) - CryptBitcoin.loadLib(request.param) - return CryptBitcoin - -@pytest.fixture(scope='function', autouse=True) -def logCaseStart(request): - global time_start - time_start = time.time() - logging.debug("---- Start test case: %s ----" % request._pyfuncitem) - yield None # Wait until all test done - - -# Workaround for pytest bug when logging in atexit/post-fixture handlers (I/O operation on closed file) -def workaroundPytestLogError(): - import _pytest.capture - write_original = _pytest.capture.EncodedFile.write - - def write_patched(obj, *args, **kwargs): - try: - write_original(obj, *args, **kwargs) - except ValueError as err: - if str(err) == "I/O operation on closed file": - pass - else: - raise err - - def flush_patched(obj, *args, **kwargs): - try: - obj.buffer.flush(*args, **kwargs) - except ValueError as err: - if str(err).startswith("I/O operation on closed file"): - pass - else: - raise err - - _pytest.capture.EncodedFile.write = write_patched - _pytest.capture.EncodedFile.flush = flush_patched - - -workaroundPytestLogError() - -@pytest.fixture(scope='session', autouse=True) -def disableLog(): - yield None # Wait until all test done - logging.getLogger('').setLevel(logging.getLevelName(logging.CRITICAL)) - diff --git a/src/Test/pytest.ini b/src/Test/pytest.ini index 0ffb385f..d09210d1 100644 --- a/src/Test/pytest.ini +++ b/src/Test/pytest.ini @@ -1,6 +1,5 @@ [pytest] python_files = Test*.py -addopts = -rsxX -v --durations=6 --capture=fd +addopts = -rsxX -v --durations=6 markers = - slow: mark a tests as slow. - webtest: mark a test as a webtest. + webtest: mark a test as a webtest. 
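
A note on the conftest.py changes above: both the old and new versions rely on the same pytest pattern for keeping tests hermetic — anything that would touch the network is replaced with mock.MagicMock, and teardown is registered through request.addfinalizer. A minimal sketch of that pattern, under stated assumptions (DummySite is an illustrative stand-in, not ZeroNet's real Site class; the standalone mock package is used as in the diff, though unittest.mock works the same way):

    import mock  # standalone mock package, as used in conftest.py; unittest.mock also works
    import pytest

    class DummySite(object):
        def __init__(self, address):
            self.address = address

        def announce(self):  # would normally contact trackers and peers
            raise RuntimeError("tests must not touch the network")

    @pytest.fixture
    def site(request):
        site = DummySite("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        site.announce = mock.MagicMock(return_value=True)  # don't try to find peers from the net
        request.addfinalizer(lambda: None)  # the real fixture deletes site files and closes the content db here
        return site
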
\ No newline at end of file diff --git a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/content.json b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/content.json index 786db098..481e85c0 100644 --- a/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/content.json +++ b/src/Test/testdata/1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT-original/content.json @@ -110,24 +110,28 @@ "files_allowed": "data.json", "includes_allowed": false, "max_size": 20000, - "signers": ["15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo"], + "signers": [ "15ik6LeBWnACWfaika1xqGapRZ1zh3JpCo" ], "signers_required": 1, "user_id": 47, "user_name": "test" }, "data/users/content.json": { - "signers": ["1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f"], + "signers": [ "1LSxsKfC9S9TVXGGNSM3vPHjyW82jgCX5f" ], "signers_required": 1 } }, "inner_path": "content.json", - "modified": 1503257990, - "optional": "(data/img/zero.*|data/optional.*)", + "modified": 1470340814.398, + "optional": "(data/img/zero.*|data/optional.txt)", + "sign": [ + 97109682715361435939224827180347249103292065360394744202228746983963408224367, + 11265525339134417763503386504484747758843583699493121728411011831763675881820 + ], "signers_sign": "HDNmWJHM2diYln4pkdL+qYOvgE7MdwayzeG+xEUZBgp1HtOjBJS+knDEVQsBkjcOPicDG2it1r6R1eQrmogqSP0=", "signs": { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G4Uq365UBliQG66ygip1jNGYqW6Eh9Mm7nLguDFqAgk/Hksq/ruqMf9rXv78mgUfPBvL2+XgDKYvFDtlykPFZxk=" + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": "G9zhZM8oiMGFO3CqnU6QyzYKLZXnrOgr+BWOIqj+BWcrIvHBTbrtIkVchmq2VZo9JYTGO/loe0VuC+83BtqsMpM=" }, "signs_required": 1, "title": "ZeroBlog", - "zeronet_version": "0.5.7" + "zeronet_version": "0.3.7" } \ No newline at end of file diff --git a/src/Tor/TorManager.py b/src/Tor/TorManager.py index 865d8fbf..59c5989c 100644 --- a/src/Tor/TorManager.py +++ b/src/Tor/TorManager.py @@ -12,13 +12,14 @@ import atexit import gevent from Config import config - -from lib import Ed25519 -from Crypt import CryptTor - +from Crypt import CryptRsa from Site import SiteManager -import socks -from gevent.lock import RLock +from lib.PySocks import socks +try: + from gevent.coros import RLock +except: + from gevent.lock import RLock +from util import helper from Debug import Debug from Plugin import PluginManager @@ -29,104 +30,121 @@ class TorManager(object): self.privatekeys = {} # Onion: Privatekey self.site_onions = {} # Site address: Onion self.tor_exe = "tools/tor/tor.exe" - self.has_meek_bridges = os.path.isfile("tools/tor/PluggableTransports/meek-client.exe") self.tor_process = None self.log = logging.getLogger("TorManager") self.start_onions = None self.conn = None self.lock = RLock() - self.starting = True - self.connecting = True - self.status = None - self.event_started = gevent.event.AsyncResult() if config.tor == "disable": self.enabled = False self.start_onions = False - self.setStatus("Disabled") + self.status = "Disabled" else: self.enabled = True - self.setStatus("Waiting") + self.status = "Waiting" if fileserver_port: self.fileserver_port = fileserver_port else: self.fileserver_port = config.fileserver_port - self.ip, self.port = config.tor_controller.rsplit(":", 1) + self.ip, self.port = config.tor_controller.split(":") self.port = int(self.port) - self.proxy_ip, self.proxy_port = config.tor_proxy.rsplit(":", 1) + self.proxy_ip, self.proxy_port = config.tor_proxy.split(":") self.proxy_port = int(self.proxy_port) - def start(self): - self.log.debug("Starting (Tor: %s)" % config.tor) - self.starting = True - try: - if not self.connect(): - raise 
Exception(self.status) - self.log.debug("Tor proxy port %s check ok" % config.tor_proxy) - except Exception as err: - if sys.platform.startswith("win") and os.path.isfile(self.tor_exe): + # Test proxy port + if config.tor != "disable": + try: + assert self.connect(), "No connection" + self.log.debug("Tor proxy port %s check ok" % config.tor_proxy) + except Exception, err: self.log.info("Starting self-bundled Tor, due to Tor proxy port %s check error: %s" % (config.tor_proxy, err)) + self.enabled = False # Change to self-bundled Tor ports + from lib.PySocks import socks self.port = 49051 self.proxy_port = 49050 - if config.tor == "always": - socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) - self.enabled = True - if not self.connect(): + socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port) + if os.path.isfile(self.tor_exe): # Already, downloaded: sync mode self.startTor() - else: - self.log.info("Disabling Tor, because error while accessing Tor proxy at port %s: %s" % (config.tor_proxy, err)) - self.enabled = False - - def setStatus(self, status): - self.status = status - if "main" in sys.modules: # import main has side-effects, breaks tests - import main - if "ui_server" in dir(main): - main.ui_server.updateWebsocket() + else: # Not downloaded yet: Async mode + gevent.spawn(self.startTor) def startTor(self): if sys.platform.startswith("win"): try: + if not os.path.isfile(self.tor_exe): + self.downloadTor() + self.log.info("Starting Tor client %s..." % self.tor_exe) tor_dir = os.path.dirname(self.tor_exe) startupinfo = subprocess.STARTUPINFO() startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - cmd = r"%s -f torrc --defaults-torrc torrc-defaults --ignore-missing-torrc" % self.tor_exe - if config.tor_use_bridges: - cmd += " --UseBridges 1" - - self.tor_process = subprocess.Popen(cmd, cwd=tor_dir, close_fds=True, startupinfo=startupinfo) - for wait in range(1, 3): # Wait for startup + self.tor_process = subprocess.Popen(r"%s -f torrc" % self.tor_exe, cwd=tor_dir, close_fds=True, startupinfo=startupinfo) + for wait in range(1,10): # Wait for startup time.sleep(wait * 0.5) self.enabled = True if self.connect(): - if self.isSubprocessRunning(): - self.request("TAKEOWNERSHIP") # Shut down Tor client when controll connection closed break # Terminate on exit atexit.register(self.stopTor) - except Exception as err: - self.log.error("Error starting Tor client: %s" % Debug.formatException(str(err))) + except Exception, err: + self.log.error("Error starting Tor client: %s" % Debug.formatException(err)) self.enabled = False - self.starting = False - self.event_started.set(False) return False - def isSubprocessRunning(self): - return self.tor_process and self.tor_process.pid and self.tor_process.poll() is None - def stopTor(self): self.log.debug("Stopping...") try: - if self.isSubprocessRunning(): - self.request("SIGNAL SHUTDOWN") - except Exception as err: + self.tor_process.terminate() + except Exception, err: self.log.error("Error stopping Tor: %s" % err) + def downloadTor(self): + self.log.info("Downloading Tor...") + # Check Tor webpage for link + download_page = helper.httpRequest("https://www.torproject.org/download/download.html").read() + download_url = re.search('href="(.*?tor.*?win32.*?zip)"', download_page).group(1) + if not download_url.startswith("http"): + download_url = "https://www.torproject.org/download/" + download_url + + # Download Tor client + self.log.info("Downloading %s" % download_url) + data = 
helper.httpRequest(download_url, as_file=True) + data_size = data.tell() + + # Handle redirect + if data_size < 1024 and "The document has moved" in data.getvalue(): + download_url = re.search('href="(.*?tor.*?win32.*?zip)"', data.getvalue()).group(1) + data = helper.httpRequest(download_url, as_file=True) + data_size = data.tell() + + if data_size > 1024: + import zipfile + zip = zipfile.ZipFile(data) + self.log.info("Unpacking Tor") + for inner_path in zip.namelist(): + if ".." in inner_path: + continue + dest_path = inner_path + dest_path = re.sub("^Data/Tor/", "tools/tor/data/", dest_path) + dest_path = re.sub("^Data/", "tools/tor/data/", dest_path) + dest_path = re.sub("^Tor/", "tools/tor/", dest_path) + dest_dir = os.path.dirname(dest_path) + if dest_dir and not os.path.isdir(dest_dir): + os.makedirs(dest_dir) + + if dest_dir != dest_path.strip("/"): + data = zip.read(inner_path) + if not os.path.isfile(dest_path): + open(dest_path, 'wb').write(data) + else: + self.log.error("Bad response from server: %s" % data.getvalue()) + return False + def connect(self): if not self.enabled: return False @@ -141,8 +159,7 @@ class TorManager(object): else: conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.log.debug("Connecting to Tor Controller %s:%s" % (self.ip, self.port)) - self.connecting = True + self.log.info("Connecting to Tor Controller %s:%s" % (self.ip, self.port)) try: with self.lock: conn.connect((self.ip, self.port)) @@ -150,91 +167,81 @@ class TorManager(object): # Auth cookie file res_protocol = self.send("PROTOCOLINFO", conn) cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol) - - if config.tor_password: - res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn) - elif cookie_match: - cookie_file = cookie_match.group(1).encode("ascii").decode("unicode_escape") - if not os.path.isfile(cookie_file) and self.tor_process: - # Workaround for tor client cookie auth file utf8 encoding bug (https://github.com/torproject/stem/issues/57) - cookie_file = os.path.dirname(self.tor_exe) + "\\data\\control_auth_cookie" + if cookie_match: + cookie_file = cookie_match.group(1).decode("string-escape") auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read()) - res_auth = self.send("AUTHENTICATE %s" % auth_hex.decode("utf8"), conn) + res_auth = self.send("AUTHENTICATE %s" % auth_hex, conn) + elif config.tor_password: + res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn) else: res_auth = self.send("AUTHENTICATE", conn) - if "250 OK" not in res_auth: - raise Exception("Authenticate error %s" % res_auth) + assert "250 OK" in res_auth, "Authenticate error %s" % res_auth # Version 0.2.7.5 required because ADD_ONION support res_version = self.send("GETINFO version", conn) - version = re.search(r'version=([0-9\.]+)', res_version).group(1) - if float(version.replace(".", "0", 2)) < 207.5: - raise Exception("Tor version >=0.2.7.5 required, found: %s" % version) + version = re.search('version=([0-9\.]+)', res_version).group(1) + assert float(version.replace(".", "0", 2)) >= 207.5, "Tor version >=0.2.7.5 required, found: %s" % version - self.setStatus("Connected (%s)" % res_auth) - self.event_started.set(True) - self.starting = False - self.connecting = False + self.status = u"Connected (%s)" % res_auth self.conn = conn - except Exception as err: + except Exception, err: self.conn = None - self.setStatus("Error (%s)" % str(err)) - self.log.warning("Tor controller connect error: %s" % Debug.formatException(str(err))) + self.status = u"Error (%s)" % err + 
self.log.error("Tor controller connect error: %s" % Debug.formatException(err)) self.enabled = False return self.conn def disconnect(self): - if self.conn: - self.conn.close() + self.conn.close() self.conn = None def startOnions(self): if self.enabled: self.log.debug("Start onions") self.start_onions = True - self.getOnion("global") # Get new exit node ip def resetCircuits(self): res = self.request("SIGNAL NEWNYM") if "250 OK" not in res: - self.setStatus("Reset circuits error (%s)" % res) + self.status = u"Reset circuits error (%s)" % res self.log.error("Tor reset circuits error: %s" % res) def addOnion(self): if len(self.privatekeys) >= config.tor_hs_limit: - return random.choice([key for key in list(self.privatekeys.keys()) if key != self.site_onions.get("global")]) + return random.choice(self.privatekeys.keys()) result = self.makeOnionAndKey() if result: onion_address, onion_privatekey = result self.privatekeys[onion_address] = onion_privatekey - self.setStatus("OK (%s onions running)" % len(self.privatekeys)) + self.status = u"OK (%s onions running)" % len(self.privatekeys) SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) return onion_address else: return False def makeOnionAndKey(self): - res = self.request("ADD_ONION NEW:ED25519-V3 port=%s" % self.fileserver_port) - match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=ED25519-V3:(.*?)[\r\n]", res, re.DOTALL) + res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port) + match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL) if match: onion_address, onion_privatekey = match.groups() return (onion_address, onion_privatekey) else: - self.setStatus("AddOnion error (%s)" % res) + self.status = u"AddOnion error (%s)" % res self.log.error("Tor addOnion error: %s" % res) return False + def delOnion(self, address): res = self.request("DEL_ONION %s" % address) if "250 OK" in res: del self.privatekeys[address] - self.setStatus("OK (%s onion running)" % len(self.privatekeys)) + self.status = "OK (%s onion running)" % len(self.privatekeys) return True else: - self.setStatus("DelOnion error (%s)" % res) + self.status = u"DelOnion error (%s)" % res self.log.error("Tor delOnion error: %s" % res) self.disconnect() return False @@ -255,44 +262,39 @@ class TorManager(object): back = "" for retry in range(2): try: - conn.sendall(b"%s\r\n" % cmd.encode("utf8")) + conn.sendall("%s\r\n" % cmd) while not back.endswith("250 OK\r\n"): - back += conn.recv(1024 * 64).decode("utf8") + back += conn.recv(1024 * 64).decode("utf8", "ignore") break - except Exception as err: + except Exception, err: self.log.error("Tor send error: %s, reconnecting..." 
% err) - if not self.connecting: - self.disconnect() - time.sleep(1) - self.connect() + self.disconnect() + time.sleep(1) + self.connect() back = None - if back: - self.log.debug("< %s" % back.strip()) + self.log.debug("< %s" % back.strip()) return back def getPrivatekey(self, address): return self.privatekeys[address] def getPublickey(self, address): - return CryptTor.privatekeyToPublickey(self.privatekeys[address]) + return CryptRsa.privatekeyToPublickey(self.privatekeys[address]) def getOnion(self, site_address): - if not self.enabled: - return None - - if config.tor == "always": # Different onion for every site - onion = self.site_onions.get(site_address) - else: # Same onion for every site - onion = self.site_onions.get("global") - site_address = "global" - - if not onion: - with self.lock: + with self.lock: + if not self.enabled: + return None + if self.start_onions: # Different onion for every site + onion = self.site_onions.get(site_address) + else: # Same onion for every site + onion = self.site_onions.get("global") + site_address = "global" + if not onion: self.site_onions[site_address] = self.addOnion() onion = self.site_onions[site_address] self.log.debug("Created new hidden service for %s: %s" % (site_address, onion)) - - return onion + return onion # Creates and returns a # socket that has connected to the Tor Network @@ -300,12 +302,11 @@ class TorManager(object): if not self.enabled: return False self.log.debug("Creating new Tor socket to %s:%s" % (onion, port)) - if self.starting: - self.log.debug("Waiting for startup...") - self.event_started.get() if config.tor == "always": # Every socket is proxied by default, in this mode sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect((onion, int(port))) else: sock = socks.socksocket() sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port) - return sock + sock.connect((onion, int(port))) + return sock \ No newline at end of file diff --git a/src/Tor/__init__.py b/src/Tor/__init__.py index d0fcffaf..250eac2d 100644 --- a/src/Tor/__init__.py +++ b/src/Tor/__init__.py @@ -1 +1 @@ -from .TorManager import TorManager \ No newline at end of file +from TorManager import TorManager \ No newline at end of file diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py index e73f9be1..9c25eb33 100644 --- a/src/Translate/Translate.py +++ b/src/Translate/Translate.py @@ -3,48 +3,30 @@ import json import logging import inspect import re -import html -import string from Config import config translates = [] - -class EscapeProxy(dict): - # Automatically escape the accessed string values - def __getitem__(self, key): - val = dict.__getitem__(self, key) - if type(val) in (str, str): - return html.escape(val) - elif type(val) is dict: - return EscapeProxy(val) - elif type(val) is list: - return EscapeProxy(enumerate(val)) # Convert lists to dict - else: - return val - - class Translate(dict): def __init__(self, lang_dir=None, lang=None): if not lang_dir: - lang_dir = os.path.dirname(__file__) + "/languages/" + lang_dir = "src/Translate/languages/" if not lang: lang = config.language self.lang = lang self.lang_dir = lang_dir self.setLanguage(lang) - self.formatter = string.Formatter() if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.load) + DebugReloader(self.load) translates.append(self) def setLanguage(self, lang): - self.lang = re.sub("[^a-z-]", "", lang) + self.lang = lang self.lang_file = self.lang_dir + "%s.json" % lang self.load() 
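
For context on the TorManager changes above: authentication, the version check, and onion creation all ride on Tor's line-based control-port protocol, where each command is terminated with CRLF and a successful reply ends in "250 OK". A minimal sketch of that exchange, assuming a local Tor daemon with ControlPort 9051 and no authentication required (tor_cmd is an illustrative helper, and error replies such as 515 are not handled here):

    import socket

    def tor_cmd(conn, cmd):
        # Send one command and read until the terminating "250 OK" line.
        conn.sendall(("%s\r\n" % cmd).encode("utf8"))
        back = b""
        while not back.endswith(b"250 OK\r\n"):
            back += conn.recv(1024 * 64)
        return back.decode("utf8")

    conn = socket.create_connection(("127.0.0.1", 9051))
    tor_cmd(conn, "AUTHENTICATE")  # the real code first runs PROTOCOLINFO and sends the auth cookie as hex
    res = tor_cmd(conn, "ADD_ONION NEW:ED25519-V3 port=1544")
    # res contains ServiceID=<onion address> and PrivateKey=ED25519-V3:<key>
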
@@ -52,17 +34,9 @@ class Translate(dict): return "" % self.lang def load(self): - if self.lang == "en": - data = {} - dict.__init__(self, data) - self.clear() - elif os.path.isfile(self.lang_file): - try: - data = json.load(open(self.lang_file, encoding="utf8")) - logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data))) - except Exception as err: - logging.error("Error loading translate file %s: %s" % (self.lang_file, err)) - data = {} + if os.path.isfile(self.lang_file): + data = json.load(open(self.lang_file)) + logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data))) dict.__init__(self, data) else: data = {} @@ -73,37 +47,38 @@ class Translate(dict): def format(self, s, kwargs, nested=False): kwargs["_"] = self if nested: - back = self.formatter.vformat(s, [], kwargs) # PY3 TODO: Change to format_map - return self.formatter.vformat(back, [], kwargs) + return s.format(**kwargs).format(**kwargs) else: - return self.formatter.vformat(s, [], kwargs) + return s.format(**kwargs) def formatLocals(self, s, nested=False): kwargs = inspect.currentframe().f_back.f_locals return self.format(s, kwargs, nested=nested) - def __call__(self, s, kwargs=None, nested=False, escape=True): - if not kwargs: + def __call__(self, s, kwargs=None, nested=False): + if kwargs: + return self.format(s, kwargs, nested=nested) + else: kwargs = inspect.currentframe().f_back.f_locals - if escape: - kwargs = EscapeProxy(kwargs) - return self.format(s, kwargs, nested=nested) + return self.format(s, kwargs, nested=nested) def __missing__(self, key): return key def pluralize(self, value, single, multi): if value > 1: - return self[multi].format(value) - else: return self[single].format(value) + else: + return self[multi].format(value) def translateData(self, data, translate_table=None, mode="js"): if not translate_table: translate_table = self + data = data.decode("utf8") + patterns = [] - for key, val in list(translate_table.items()): + for key, val in translate_table.items(): if key.startswith("_("): # Problematic string: only match if called between _(" ") function key = key.replace("_(", "").replace(")", "").replace(", ", '", "') translate_table[key] = "|" + val @@ -126,10 +101,6 @@ class Translate(dict): else: pattern = '"(' + "|".join(patterns) + ')"' data = re.sub(pattern, replacer, data) - - if mode == "html": - data = data.replace("lang={lang}", "lang=%s" % self.lang) # lang get parameter to .js file to avoid cache - - return data + return data.encode("utf8") translate = Translate() diff --git a/src/Translate/__init__.py b/src/Translate/__init__.py index ba0ab6d4..40f34063 100644 --- a/src/Translate/__init__.py +++ b/src/Translate/__init__.py @@ -1 +1 @@ -from .Translate import * \ No newline at end of file +from Translate import * \ No newline at end of file diff --git a/src/Translate/languages/da.json b/src/Translate/languages/da.json index 8e6f0845..90509dd1 100644 --- a/src/Translate/languages/da.json +++ b/src/Translate/languages/da.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Tillykke, din port ({0}) er åben.
    Du er nu fuld klient på ZeroNet!", + "Congratulation, your port {0} is opened.
    You are full member of ZeroNet network!": "Tillykke, din port ({0}) er åben.
    Du er nu fuld klient på ZeroNet!", "Tor mode active, every connection using Onion route.": "TOR er aktiv, alle forbindelser anvender Onions.", "Successfully started Tor onion hidden services.": "OK. Startede TOR skjult onion service.", "Unable to start hidden services, please check your config.": "Fejl. Kunne ikke starte TOR skjult onion service. Tjek din opsætning!", "For faster connections open {0} port on your router.": "Åben port {0} på din router for hurtigere forbindelse.", "Your connection is restricted. Please, open {0} port on your router": "Begrænset forbindelse. Åben venligst port {0} på din router", - "or configure Tor to become a full member of the ZeroNet network.": "eller opsæt TOR for fuld adgang til ZeroNet!", + "or configure Tor to become full member of ZeroNet network.": "eller opsæt TOR for fuld adgang til ZeroNet!", "Select account you want to use in this site:": "Vælg bruger til brug på denne side:", "currently selected": "nuværende bruger", @@ -46,6 +46,6 @@ "Site size limit changed to {0}MB": "Max side størrelse ændret til {0}MB", " New version of this page has just released.
    Reload to see the modified content.": " Ny version af denne side er blevet offentliggjort.
    Genindlæs venligst siden (F5) for at se nyt indhold!", "This site requests permission:": "Denne side betyder om tilladdelse:", - "_(Accept)": "Tillad" + "Grant": "Tillad" -} +} \ No newline at end of file diff --git a/src/Translate/languages/de.json b/src/Translate/languages/de.json index 1cc63b74..ceedab24 100644 --- a/src/Translate/languages/de.json +++ b/src/Translate/languages/de.json @@ -1,24 +1,24 @@ { - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gratulation, dein Port {0} ist offen.
    Du bist ein volles Mitglied des ZeroNet Netzwerks!", - "Tor mode active, every connection using Onion route.": "Tor Modus aktiv, jede Verbindung nutzt die Onion Route.", + "Congratulation, your port {0} is opened.
    You are full member of ZeroNet network!": "Gratulation, dein Port {0} ist offen.
    Du bist ein volles Mitglied des ZeroNet Netzwerks!", + "Tor mode active, every connection using Onion route.": "Tor modus aktiv, jede verbindung nutzt die Onion Route.", "Successfully started Tor onion hidden services.": "Tor versteckte Dienste erfolgreich gestartet.", "Unable to start hidden services, please check your config.": "Nicht möglich versteckte Dienste zu starten.", - "For faster connections open {0} port on your router.": "Für schnellere Verbindungen, öffne Port {0} auf deinem Router.", + "For faster connections open {0} port on your router.": "Für schnellere verbindungen öffne Port {0} auf deinem Router.", "Your connection is restricted. Please, open {0} port on your router": "Deine Verbindung ist eingeschränkt. Bitte öffne Port {0} auf deinem Router", - "or configure Tor to become a full member of the ZeroNet network.": "oder konfiguriere Tor um ein volles Mitglied des ZeroNet Netzwerks zu werden.", + "or configure Tor to become full member of ZeroNet network.": "oder konfiguriere Tor um ein volles Mitglied des ZeroNet Netzwerks zu werden.", - "Select account you want to use in this site:": "Wähle das Konto, das du auf dieser Seite benutzen willst:", + "Select account you want to use in this site:": "Wähle das Konto, dass du auf dieser Seite benutzen willst:", "currently selected": "aktuell ausgewählt", "Unique to site": "Eindeutig zur Seite", "Content signing failed": "Signierung des Inhalts fehlgeschlagen", "Content publish queued for {0:.0f} seconds.": "Veröffentlichung des Inhalts um {0:.0f} Sekunden verzögert.", "Content published to {0} peers.": "Inhalt zu {0} Peers veröffentlicht.", - "No peers found, but your content is ready to access.": "Keine Peers gefunden, aber dein Inhalt ist bereit zum Zugriff.", - "Your network connection is restricted. Please, open {0} port": "Deine Netzwerkverbindung ist beschränkt. Bitte öffne Port {0}", - "on your router to make your site accessible for everyone.": "auf deinem Router um deine Seite für Jeden zugänglich zu machen.", + "No peers found, but your content is ready to access.": "Keine Peers geufnden, aber dein Inhalt ist bereit zum zugriff.", + "Your network connection is restricted. Please, open {0} port": "Deine Netwerk Verbindung ist beschränkt. Bitte öffne Port {0}", + "on your router to make your site accessible for everyone.": "auf deinem router um deine Seite für jeden zugänglich zu machen.", "Content publish failed.": "Inhalt konnte nicht veröffentlicht werden.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Diese Datei wird noch synchronisiert. Wenn jetzt geschrieben wird geht der vorherige Inhalt verloren.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Diese Datei ist immer noch am synchronisieren. 
Wenn jetzt geschrieben wird geht der vorherige Inhalt verloren.", "Write content anyway": "Inhalt trotzdem schreiben", "New certificate added:": "Neues Zertifikat hinzugefügt:", "You current certificate:": "Dein aktuelles Zertifikat:", @@ -27,25 +27,25 @@ "Site cloned": "Seite geklont", "You have successfully changed the web interface's language!": "Du hast die Sprache des Webinterface erfolgreich geändert!", - "Due to the browser's caching, the full transformation could take some minute.": "Aufgrund des Browsercaches kann die volle Transformation Minuten dauern.", + "Due to the browser's caching, the full transformation could take some minute.": "Wegen des Browser Cachings kann die volle Transformation Minuten dauern.", - "Connection with UiServer Websocket was lost. Reconnecting...": "Die Verbindung mit UiServer Websocketist abgebrochen. Neu verbinden...", - "Connection with UiServer Websocket recovered.": "Die Verbindung mit UiServer Websocket wurde wiederhergestellt.", + "Connection with UiServer Websocket was lost. Reconnecting...": "Verbindung mit UiServer Websocket wurde verloren. Neu verbinden...", + "Connection with UiServer Websocket recovered.": "Verbindung mit UiServer Websocket wiederhergestellt.", "UiServer Websocket error, please reload the page.": "UiServer Websocket Fehler, bitte Seite neu laden.", "   Connecting...": "   Verbinden...", - "Site size: ": "Seitengröße: ", - "MB is larger than default allowed ": "MB ist größer als der erlaubte Standart", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Öffne Seite und setze das Limit auf \" + site_info.next_size_limit + \"MB", + "Site size: ": "Seiten größe: ", + "MB is larger than default allowed ": "MB ist größer als standartmäßig erlaubt", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Öffne Seite und setze das limit auf \" + site_info.next_size_limit + \"MB", " files needs to be downloaded": " Dateien müssen noch heruntergeladen werden", " downloaded": " heruntergeladen", - " download failed": " Herunterladen fehlgeschlagen", + " download failed": " herunterladen fehlgeschlagen", "Peers found: ": "Peers gefunden: ", "No peers found": "Keine Peers gefunden", - "Running out of size limit (": "Das Speicherlimit ist bald ausgeschöpft (", + "Running out of size limit (": "Das Speicher Limit wird knapp (", "Set limit to \" + site_info.next_size_limit + \"MB": "Limit auf \" + site_info.next_size_limit + \"MB ändern", - "Site size limit changed to {0}MB": "Speicherlimit für diese Seite auf {0}MB geändert", + "Site size limit changed to {0}MB": "Speicher Limit für diese Seite auf {0}MB geändert", " New version of this page has just released.
    Reload to see the modified content.": " Neue version dieser Seite wurde gerade veröffentlicht.
    Lade die Seite neu um den geänderten Inhalt zu sehen.", "This site requests permission:": "Diese Seite fordert rechte:", - "_(Accept)": "Genehmigen" + "Grant": "Genehmigen" } diff --git a/src/Translate/languages/es.json b/src/Translate/languages/es.json index 4cac077b..659dc0e9 100644 --- a/src/Translate/languages/es.json +++ b/src/Translate/languages/es.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "¡Felicidades! tu puerto {0} está abierto.
    ¡Eres un miembro completo de la red Zeronet!", + "Congratulation, your port {0} is opened.
    You are full member of ZeroNet network!": "¡Felicidades! tu puerto {0} está abierto.
    ¡Eres un miembro completo de la red Zeronet!", "Tor mode active, every connection using Onion route.": "Modo Tor activado, cada conexión usa una ruta Onion.", "Successfully started Tor onion hidden services.": "Tor ha iniciado satisfactoriamente la ocultación de los servicios onion.", "Unable to start hidden services, please check your config.": "No se puedo iniciar los servicios ocultos, por favor comprueba tu configuración.", "For faster connections open {0} port on your router.": "Para conexiones más rápidas abre el puerto {0} en tu router.", "Your connection is restricted. Please, open {0} port on your router": "Tu conexión está limitada. Por favor, abre el puerto {0} en tu router", - "or configure Tor to become a full member of the ZeroNet network.": "o configura Tor para convertirte en un miembro completo de la red ZeroNet.", + "or configure Tor to become full member of ZeroNet network.": "o configura Tor para convertirte en un miembro completo de la red ZeroNet.", "Select account you want to use in this site:": "Selecciona la cuenta que quieres utilizar en este sitio:", "currently selected": "actualmente seleccionada", @@ -46,6 +46,6 @@ "Site size limit changed to {0}MB": "Límite de tamaño del sitio cambiado a {0}MBs", " New version of this page has just released.
    Reload to see the modified content.": " Se ha publicado una nueva versión de esta página .
    Recarga para ver el contenido modificado.", "This site requests permission:": "Este sitio solicita permiso:", - "_(Accept)": "Conceder" + "Grant": "Conceder" } diff --git a/src/Translate/languages/fa.json b/src/Translate/languages/fa.json deleted file mode 100644 index e644247a..00000000 --- a/src/Translate/languages/fa.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "تبریک، درگاه {0} شما باز شده است.
    شما یک عضو تمام شبکه ZeroNet هستید!", - "Tor mode active, every connection using Onion route.": "حالت Tor فعال است، هر ارتباط از مسیریابی پیاز (Onion) استفاده می‌کند.", - "Successfully started Tor onion hidden services.": "خدمات پنهان پیاز (Onion) Tor با موفقیت راه‌اندازی شد.", - "Unable to start hidden services, please check your config.": "قادر به راه‌اندازی خدمات پنهان نیستیم، لطفا تنظیمات خود را بررسی نمایید.", - "For faster connections open {0} port on your router.": "برای ارتباطات سریعتر درگاه {0} را بر روی مسیریاب (روتر) خود باز نمایید.", - "Your connection is restricted. Please, open {0} port on your router": "ارتباط شما محدود‌شده است. لطفا درگاه {0} را در مسیریاب (روتر) خود باز نمایید", - "or configure Tor to become a full member of the ZeroNet network.": "یا پیکربندی Tor را انجام دهید تا به یک عضو تمام شبکه ZeroNet تبدیل شوید.", - - "Select account you want to use in this site:": "حسابی را که می‌خواهید در این سایت استفاده کنید، انتخاب کنید:", - "currently selected": "در حال حاضر انتخاب‌شده", - "Unique to site": "مختص به سایت", - - "Content signing failed": "امضای محتوا با شکست مواجه شد", - "Content publish queued for {0:.0f} seconds.": "محتوا در صف انتشار با {0:.0f} ثانیه تاخیر قرار گرفت.", - "Content published to {0} peers.": "محتوا برای {0} تعداد همتا انتشار یافت.", - "No peers found, but your content is ready to access.": "همتایی یافت نشد، اما محتوای شما آماده دسترسی است.", - "Your network connection is restricted. Please, open {0} port": "ارتباط شبکه شما محدود‌شده است. لطفا درگاه {0} را", - "on your router to make your site accessible for everyone.": "در مسیریاب (روتر) خود باز کنید تا سایت خود را برای همه در دسترس قرار دهید.", - "Content publish failed.": "انتشار محتوا موفق نبود.", - "This file still in sync, if you write it now, then the previous content may be lost.": "این فایل همچنان همگام است، اگز شما آن را بنویسید، ممکن است محتوای قبلی از‌بین رود.", - "Write content anyway": "در هر صورت محتوا را بنویس", - "New certificate added:": "گواهی جدیدی افزوده شد:", - "You current certificate:": "گواهی فعلی شما:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "تغییرش بده به {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "گواهینامه به: {auth_type}/{auth_user_name}@{domain} تغییر پیدا کرد.", - "Site cloned": "سایت همسان‌سازی شد", - - "You have successfully changed the web interface's language!": "شما با موفقیت زبان رابط وب را تغییر دادید!", - "Due to the browser's caching, the full transformation could take some minute.": "به دلیل ذخیره‌سازی در مرور‌گر، امکان دارد تغییر شکل کامل چند دقیقه طول بکشد.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "اتصال با UiServer Websocket قطع شد. 
اتصال دوباره...", - "Connection with UiServer Websocket recovered.": "ارتباط با UiServer Websocket دوباره بر‌قرار شد.", - "UiServer Websocket error, please reload the page.": "خطای UiServer Websocket, لطفا صفحه را دوباره بارگیری کنید.", - "   Connecting...": "   برقراری ارتباط...", - "Site size: ": "حجم سایت: ", - "MB is larger than default allowed ": "MB بیشتر از پیش‌فرض مجاز است ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "سایت را باز کرده و محدوده حجم را به \" + site_info.next_size_limit + \"MB تنظیم کن", - " files needs to be downloaded": " فایل‌هایی که نیاز است، دانلود شوند", - " downloaded": " دانلود شد", - " download failed": " دانلود موفق نبود", - "Peers found: ": "چند همتا یافت شد: ", - "No peers found": "همتایی یافت نشد", - "Running out of size limit (": "عبور کرده از محدوده حجم (", - "Set limit to \" + site_info.next_size_limit + \"MB": "محدوده را به \" + site_info.next_size_limit + \"MB تنظیم کن", - "Site size limit changed to {0}MB": "محدوده حجم سایت به {0}MB تغییر کرد", - " New version of this page has just released.
    Reload to see the modified content.": " نسخه جدیدی از این صفحه منتشر شده است.
    برای مشاهده محتوای تغییر‌یافته دوباره بارگیری نمایید.", - "This site requests permission:": "این سایت درخواست مجوز می‌کند:", - "_(Accept)": "_(پذیرفتن)" -} diff --git a/src/Translate/languages/fr.json b/src/Translate/languages/fr.json index b46ef2c3..b6881cc4 100644 --- a/src/Translate/languages/fr.json +++ b/src/Translate/languages/fr.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Félicitations, le port ({0}) est ouvert.
    Vous êtes maintenant membre de ZeroNet!!", - "Tor mode active, every connection using Onion route.": "Tor actif, toutes les connexions utilisent un routage Onion.", + "Congratulation, your port {0} is opened.
    You are full member of ZeroNet network!": "Félicitations, le port ({0}) est ouvert.
    Vous êtes maintenant membre de ZeroNet!!", + "Tor mode active, every connection using Onion route.": "Tor actif, toutes les connections utilisent un routage Onion.", "Successfully started Tor onion hidden services.": "Tor activé avec succès.", - "Unable to start hidden services, please check your config.": "Impossible d'activer Tor, veuillez vérifier votre configuration.", + "Unable to start hidden services, please check your config.": "Incapable d'activer Tor, veuillez vérifier votre configuration.", "For faster connections open {0} port on your router.": "Pour une meilleure connectivité, ouvrez le port {0} sur votre routeur.", "Your connection is restricted. Please, open {0} port on your router": "Connectivité limitée. Veuillez ouvrir le port {0} sur votre routeur", - "or configure Tor to become a full member of the ZeroNet network.": "ou configurez Tor afin d'avoir accès aux pairs ZeroNet Onion.", + "or configure Tor to become full member of ZeroNet network.": "ou configurez Tor afin d'avoir accès aux pairs ZeroNet Onion.", "Select account you want to use in this site:": "Sélectionnez le compte que vous voulez utiliser pour ce site:", "currently selected": "présentement sélectionné", @@ -20,20 +20,20 @@ "Content publish failed.": "Échec de la publication du contenu.", "This file still in sync, if you write it now, then the previous content may be lost.": "Ce fichier n'est pas à jour, si vous le modifiez maintenant une version antérieure pourrait être perdue.", "Write content anyway": "Enregistrer quand même", - "New certificate added:": "Nouveau cetificat ajouté :", - "You current certificate:": "Votre certificat actuel :", + "New certificate added:": "Nouveau cetificat ajouté:", + "You current certificate:": "Votre certificat actuel:", "Change it to {auth_type}/{auth_user_name}@{domain}": "Changer pour {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificat changé pour : {auth_type}/{auth_user_name}@{domain}-ra.", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificat changé pour: {auth_type}/{auth_user_name}@{domain}-ra.", "Site cloned": "Site cloné", "You have successfully changed the web interface's language!": "Vous avez modifié la langue d'affichage avec succès!", - "Due to the browser's caching, the full transformation could take some minute.": "En fonction du cache du navigateur, la modification pourrait prendre quelques minutes.", + "Due to the browser's caching, the full transformation could take some minute.": "En fonction de la cache du navigateur, la modification pourrait prendre quelques minutes.", - "Connection with UiServer Websocket was lost. Reconnecting...": "Connexion avec UiServer Websocket rompue. Reconnexion...", - "Connection with UiServer Websocket recovered.": "Connexion avec UiServer Websocket rétablie.", + "Connection with UiServer Websocket was lost. Reconnecting...": "Connection avec UiServer Websocket rompue. 
Reconnection...", + "Connection with UiServer Websocket recovered.": "Connection avec UiServer Websocket rétablie.", "UiServer Websocket error, please reload the page.": "Erreur du UiServer Websocket, veuillez recharger la page.", - "   Connecting...": "   Connexion...", - "Site size: ": "Taille du site : ", + "   Connecting...": "   Connection...", + "Site size: ": "Taille du site: ", "MB is larger than default allowed ": "MB est plus large que la taille permise par défaut ", "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Ouvrez le site et augmentez la taille maximale à \" + site_info.next_size_limit + \"MB-ra", " files needs to be downloaded": " fichiers doivent être téléchargés", @@ -45,7 +45,7 @@ "Set limit to \" + site_info.next_size_limit + \"MB": "Augmentez la taille maximale à \" + site_info.next_size_limit + \"MB", "Site size limit changed to {0}MB": "Taille maximale du site changée à {0}MB", " New version of this page has just released.
    Reload to see the modified content.": " Une nouvelle version de cette page vient d'être publiée.
    Rechargez pour voir les modifications.", - "This site requests permission:": "Ce site requiert une permission :", - "_(Accept)": "Autoriser" + "This site requests permission:": "Ce site requiert une permission:", + "Grant": "Autoriser" } diff --git a/src/Translate/languages/hu.json b/src/Translate/languages/hu.json index f9487f1d..eb43615f 100644 --- a/src/Translate/languages/hu.json +++ b/src/Translate/languages/hu.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gratulálunk, a portod ({0}) nyitva van.
    Teljes értékű tagja vagy a hálózatnak!", + "Congratulation, your port {0} is opened.
    You are full member of ZeroNet network!": "Gratulálunk, a portod ({0}) nyitva van.
    Teljes értékű tagja vagy a hálózatnak!", "Tor mode active, every connection using Onion route.": "Tor mód aktív, minden kapcsolat az Onion hálózaton keresztül történik.", "Successfully started Tor onion hidden services.": "Sikeresen elindultak a Tor onion titkos szolgáltatások.", "Unable to start hidden services, please check your config.": "Nem sikerült elindítani a Tor onion szolgáltatásokat. Kérjük, ellenőrizd a beállításokat!", "For faster connections open {0} port on your router.": "A gyorsabb kapcsolatok érdekében nyisd ki a {0} portot a routereden.", "Your connection is restricted. Please, open {0} port on your router": "A kapcsolatod korlátozott. Kérjük, nyisd ki a {0} portot a routereden", - "or configure Tor to become a full member of the ZeroNet network.": "vagy állítsd be a Tor kliensed, hogy teljes értékű tagja legyél a hálózatnak!", + "or configure Tor to become full member of ZeroNet network.": "vagy állítsd be a Tor kliensed, hogy teljes értékű tagja legyél a hálózatnak!", "Select account you want to use in this site:": "Válaszd ki az oldalhoz használt felhasználónevet:", "currently selected": "jelenleg kijelölt", @@ -46,6 +46,6 @@ "Site size limit changed to {0}MB": "A méretkorlát módosítva {0}MB-ra", " New version of this page has just released.
    Reload to see the modified content.": "Az oldal épp most módosult
    A megváltozott tartalomért töltsd újra!", "This site requests permission:": "Az oldal megtekintéséhez szükséges jog:", - "_(Accept)": "Engedélyezés" + "Grant": "Engedélyezés" -} +} \ No newline at end of file diff --git a/src/Translate/languages/it.json b/src/Translate/languages/it.json index 47992328..95c69de0 100644 --- a/src/Translate/languages/it.json +++ b/src/Translate/languages/it.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Congratulazioni, la tua porta {0} è aperta.
    Ora sei un membro effettivo della rete ZeroNet!", + "Congratulation, your port {0} is opened.
    You are full member of ZeroNet network!": "Congratulazioni, la tua porta ({0}) è aperta.
    Sei ora pieno membro della rete ZeroNet!", "Tor mode active, every connection using Onion route.": "Modalità Tor attiva, ogni connessione sta usando la rete Onion.", - "Successfully started Tor onion hidden services.": "Servizi Tor onion nascosti avviati con successo.", - "Unable to start hidden services, please check your config.": "Impossibile avviare i servizi nascosti. Si prega di controllare la propria configurazione!", - "For faster connections open {0} port on your router.": "Per avere connessioni più veloci aprire la porta {0} sul router.", - "Your connection is restricted. Please, open {0} port on your router": "La tua connessione è limitata. Aprire la porta {0} sul router", - "or configure Tor to become a full member of the ZeroNet network.": "o configurare Tor per diventare membro effettivo della rete ZeroNet!", + "Successfully started Tor onion hidden services.": "Tor onion hidden service avviati con successo.", + "Unable to start hidden services, please check your config.": "Impossibile avviare gli hidden service. Si prega di controllare la propria configurazione!", + "For faster connections open {0} port on your router.": "Per avere connessioni più veloci devi aprire la porta {0} sul tuo router.", + "Your connection is restricted. Please, open {0} port on your router": "La tua connessione è limitata. Dovresti aprire la porta {0} sul tuo router", + "or configure Tor to become full member of ZeroNet network.": "o configurare Tor per diventare pieno membro della rete ZeroNet!", "Select account you want to use in this site:": "Seleziona l'account che vuoi utilizzare per questo sito:", "currently selected": "attualmente selezionato", @@ -14,38 +14,38 @@ "Content signing failed": "Firma contenuti fallita", "Content publish queued for {0:.0f} seconds.": "Pubblicazione contenuti in coda per {0:.0f} secondi.", "Content published to {0} peers.": "Contenuti pubblicati su {0} peer.", - "No peers found, but your content is ready to access.": "Nessun peer trovato, ma i tuoi contenuti sono pronti per l'accesso.", - "Your network connection is restricted. Please, open {0} port": "La tua connessione di rete è limitata. Aprire la porta {0} ", - "on your router to make your site accessible for everyone.": "sul router, per rendere il sito accessibile a chiunque.", + "No peers found, but your content is ready to access.": "Nessun peer trovato, ma i tuoi contenuti sono pronti per accederci.", + "Your network connection is restricted. Please, open {0} port": "La tua connessione di rete è limitata. 
Dovresti di aprire la porta {0} ", + "on your router to make your site accessible for everyone.": "sul tuo router, per poter rendere il tuo sito accessibile a chiunque.", "Content publish failed.": "Pubblicazione contenuti fallita.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Questo file è ancora in sincronizzazione, se viene modificato i contenuti precedenti andranno persi.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Questo file è ancora in sincronizzazione, se ci scrivi sopra i contenuti precedenti andranno persi.", "Write content anyway": "Scrivere comunque i contenuti", "New certificate added:": "Aggiunto nuovo certificato:", - "You current certificate:": "Il tuo attuale certificato:", + "You current certificate:": "Il tuo attuale certificato: ", "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambiarlo in {auth_type}/{auth_user_name}@{domain}", "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificato cambianto in: {auth_type}/{auth_user_name}@{domain}.", "Site cloned": "Sito clonato", - "You have successfully changed the web interface's language!": "Hai cambiato con successo la lingua dell'interfaccia web!", - "Due to the browser's caching, the full transformation could take some minute.": "La trasformazione completa potrebbe richiedre alcuni minuti a causa della cache del browser.", + "You have successfully changed the web interface's language!": "Hai cambiato con successo la lingua sull'interfaccia web!", + "Due to the browser's caching, the full transformation could take some minute.": "Per via della cache del browser, la completa trasformazione potrebbe richiedre alcuni minuti.", "Connection with UiServer Websocket was lost. Reconnecting...": "La connessione con UiServer Websocket è andata persa. Riconnessione...", "Connection with UiServer Websocket recovered.": "Connessione con UiServer Websocket recuperata.", - "UiServer Websocket error, please reload the page.": "Errore UiServer Websocket, ricaricare la pagina!", + "UiServer Websocket error, please reload the page.": "Errore UiServer Websocket, si prega di ricaricare la pagina!", "   Connecting...": "   Connessione...", - "Site size: ": "Dimensione del sito: ", - "MB is larger than default allowed ": "MB è più grande del valore predefinito consentito ", + "Site size: ": "Dimensione sito: ", + "MB is larger than default allowed ": "MB è più grande di quanto è permesso di default ", "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Aprire il sito e impostare la dimensione limite a \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " i file devono essere scaricati", + " files needs to be downloaded": " file richiesti che siano scaricati", " downloaded": " scaricati", " download failed": " scaricamento fallito", "Peers found: ": "Peer trovati: ", "No peers found": "Nessun peer trovato", - "Running out of size limit (": "Superato il limite di spazio (", + "Running out of size limit (": "Raggiungimento del limite di spazio (", "Set limit to \" + site_info.next_size_limit + \"MB": "Imposta il limite a \" + site_info.next_size_limit + \"MB", "Site size limit changed to {0}MB": "Limite di spazio cambiato a {0}MB", " New version of this page has just released.
    <br>Reload to see the modified content.": "E' stata rilasciata una nuova versione di questa pagina<br>
    Ricaricare per vedere il contenuto modificato!", "This site requests permission:": "Questo sito richiede permessi:", - "_(Accept)": "Concedere" + "Grant": "Concedere" } diff --git a/src/Translate/languages/jp.json b/src/Translate/languages/jp.json deleted file mode 100644 index ff10aee4..00000000 --- a/src/Translate/languages/jp.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "おめでとうございます。ポート {0} が開きました。これでZeroNetネットワークのメンバーです。", - "Tor mode active, every connection using Onion route.": "Torモードがアクティブです、全ての接続はOnionルートを使用します。", - "Successfully started Tor onion hidden services.": "Tor onionサービスを正常に開始しました。", - "Unable to start hidden services, please check your config.": "非表示のサービスを開始できません。設定を確認してください。", - "For faster connections open {0} port on your router.": "接続を高速化するにはルーターのポート {0} を開けてください。", - "Your connection is restricted. Please, open {0} port on your router": "接続が制限されています。ルーターのポート {0} を開けてください。", - "or configure Tor to become a full member of the ZeroNet network.": "または、TorをZeroNetネットワークのメンバーになるように設定してください。", - - "Select account you want to use in this site:": "このサイトで使用するアカウントを選択:", - "No certificate": "証明書がありません", - "currently selected": "現在選択中", - "Unique to site": "サイト固有", - - "Content signing failed": "コンテンツの署名に失敗", - "Content publish queued for {0:.0f} seconds.": "コンテンツの公開は{0:.0f}秒のキューに入れられました。", - "Content published to {0}/{1} peers.": "サイトの更新を通知済 {0}/{1} ピア", - "Content published to {0} peers.": "{0}ピアに公開されたコンテンツ。", - "No peers found, but your content is ready to access.": "ピアは見つかりませんでしたが、コンテンツにアクセスする準備ができました。", - "Your network connection is restricted. Please, open {0} port": "ネットワーク接続が制限されています。ポート {0} を開いて、", - "on your router to make your site accessible for everyone.": "誰でもサイトにアクセスできるようにしてください。", - "Content publish failed.": "コンテンツの公開に失敗しました。", - "This file still in sync, if you write it now, then the previous content may be lost.": "このファイルはまだ同期しています。今すぐ書き込むと、前のコンテンツが失われる可能性があります。", - "Write content anyway": "とにかくコンテンツを書く", - "New certificate added:": "新しい証明書が追加されました:", - "You current certificate:": "現在の証明書:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} に変更", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "変更後の証明書: {auth_type}/{auth_user_name}@{domain}", - "Site cloned": "複製されたサイト", - - "You have successfully changed the web interface's language!": "Webインターフェースの言語が正常に変更されました!", - "Due to the browser's caching, the full transformation could take some minute.": "ブラウザのキャッシュにより、完全な変換には数分かかる場合があります。", - - "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocketとの接続が失われました。再接続しています...", - "Connection with UiServer Websocket recovered.": "UiServer Websocketとの接続が回復しました。", - "UiServer Websocket error, please reload the page.": "UiServer Websocketエラー、ページをリロードしてください。", - "   Connecting...": "   接続しています...", - "Site size: ": "サイトサイズ: ", - "MB is larger than default allowed ": "MBはデフォルトの許容値よりも大きいです。 ", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "サイトを開き、サイズ制限を \" + site_info.next_size_limit + \"MB に設定", - " files needs to be downloaded": " ファイルをダウンロードする必要があります", - " downloaded": " ダウンロード", - " download failed": " ダウンロード失敗", - "Peers found: ": "ピアが見つかりました: ", - "No peers found": "ピアが見つかりません", - "Running out of size limit (": "サイズ制限を使い果たしました (", - "Set limit to \" + site_info.next_size_limit + \"MB": "制限を \" + site_info.next_size_limit + \"MB に設定", - "Cloning site...": "サイトを複製中…", - "Site size limit changed to {0}MB": "サイトのサイズ制限が {0}MB に変更されました", - " New version of this page has just released.
    <br>Reload to see the modified content.": " このページの新しいバージョンが公開されました。<br>
    変更されたコンテンツを見るには再読み込みしてください。", - "This site requests permission:": "このサイトは権限を要求しています:", - "_(Accept)": "_(許可)", - - "Save": "保存", - "Trackers announcing": "トラッカーをお知らせ", - "Error": "エラー", - "Done": "完了", - "Tracker connection error detected.": "トラッカー接続エラーが検出されました。", - - "Update ZeroNet client to latest version?": "ZeroNetクライアントを最新版に更新しますか?", - "Update": "更新", - "Restart ZeroNet client?": "ZeroNetクライアントを再起動しますか?", - "Restart": "再起動", - "Shut down ZeroNet client?": "ZeroNetクライアントを終了しますか?", - "Shut down": "終了" -} diff --git a/src/Translate/languages/nl.json b/src/Translate/languages/nl.json index 985cce7a..d2cd241c 100644 --- a/src/Translate/languages/nl.json +++ b/src/Translate/languages/nl.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "Gefeliciteerd, je poort {0} is geopend.<br>
    Je bent een volledig lid van het ZeroNet netwerk!", + "Congratulation, your port {0} is opened.<br>
    You are full member of ZeroNet network!": "Gefeliciteerd, je poort {0} is geopend.<br>
    Je bent een volledig lid van het ZeroNet netwerk!", "Tor mode active, every connection using Onion route.": "Tor modus actief, elke verbinding gebruikt een Onion route.", "Successfully started Tor onion hidden services.": "Tor onion verborgen diensten zijn met succes gestart.", "Unable to start hidden services, please check your config.": "Het was niet mogelijk om verborgen diensten te starten, controleer je configuratie.", "For faster connections open {0} port on your router.": "Voor snellere verbindingen open je de poort {0} op je router.", "Your connection is restricted. Please, open {0} port on your router": "Je verbinding is beperkt. Open altjeblieft poort {0} op je router", - "or configure Tor to become a full member of the ZeroNet network.": "of configureer Tor om een volledig lid van het ZeroNet netwerk te worden.", + "or configure Tor to become full member of ZeroNet network.": "of configureer Tor om een volledig lid van het ZeroNet netwerk te worden.", "Select account you want to use in this site:": "Selecteer het account die je wilt gebruiken binnen deze site:", "currently selected": "huidige selectie", @@ -46,6 +46,6 @@ "Site size limit changed to {0}MB": "Site limiet op grootte is veranderd naar {0}MB", " New version of this page has just released.
    <br>Reload to see the modified content.": " Een nieuwe versie van deze pagina is zojuist uitgekomen.<br>
    Herlaad de pagina om de bijgewerkte inhoud te zien.", "This site requests permission:": "Deze site vraagt om permissie:", - "_(Accept)": "Toekennen" + "Grant": "Toekennen" } diff --git a/src/Translate/languages/pl.json b/src/Translate/languages/pl.json index 679e909d..e3087c73 100644 --- a/src/Translate/languages/pl.json +++ b/src/Translate/languages/pl.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "Gratulacje, twój port {0} jest otwarty.<br>
    Jesteś pełnoprawnym użytkownikiem sieci ZeroNet!", + "Congratulation, your port {0} is opened.<br>
    You are full member of ZeroNet network!": "Gratulacje, twój port {0} jest otwarty.<br>
    Jesteś pełnoprawnym użytkownikiem sieci ZeroNet!", "Tor mode active, every connection using Onion route.": "Tryb Tor aktywny, każde połączenie przy użyciu trasy Cebulowej.", "Successfully started Tor onion hidden services.": "Pomyślnie zainicjowano ukryte usługi cebulowe Tor.", "Unable to start hidden services, please check your config.": "Niezdolny do uruchomienia ukrytych usług, proszę sprawdź swoją konfigurację.", "For faster connections open {0} port on your router.": "Dla szybszego połączenia otwórz {0} port w swoim routerze.", "Your connection is restricted. Please, open {0} port on your router": "Połączenie jest ograniczone. Proszę, otwórz port {0} w swoim routerze", - "or configure Tor to become a full member of the ZeroNet network.": "bądź skonfiguruj Tora by stać się pełnoprawnym użytkownikiem sieci ZeroNet.", + "or configure Tor to become full member of ZeroNet network.": "bądź skonfiguruj Tora by stać się pełnoprawnym użytkownikiem sieci ZeroNet.", "Select account you want to use in this site:": "Wybierz konto którego chcesz użyć na tej stronie:", "currently selected": "aktualnie wybrany", @@ -13,8 +13,8 @@ "Content signing failed": "Podpisanie treści zawiodło", "Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).", - "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników.", - "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników, ale twoja treść jest dostępna.", + "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników równorzednych.", + "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników równorzędnych, ale twoja treść jest dostępna.", "Your network connection is restricted. Please, open {0} port": "Twoje połączenie sieciowe jest ograniczone. Proszę, otwórz port {0}", "on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogłabyć dostępna dla wszystkich.", "Content publish failed.": "Publikacja treści zawiodła.", @@ -39,16 +39,13 @@ " files needs to be downloaded": " pliki muszą zostać ściągnięte", " downloaded": " ściągnięte", " download failed": " ściąganie nie powiodło się", - "Peers found: ": "Odnaleziono użytkowników: ", - "No peers found": "Nie odnaleziono użytkowników", + "Peers found: ": "Odnaleziono użytkowników równorzednych: ", + "No peers found": "Nie odnaleziono użytkowników równorzędnych", "Running out of size limit (": "Limit rozmiaru na wyczerpaniu (", "Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBów", "Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBów", " New version of this page has just released.
    <br>Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana.<br>
    Odśwież by zobaczyć nową, zmodyfikowaną treść strony.", "This site requests permission:": "Ta strona wymaga uprawnień:", - "_(Accept)": "Przyznaj uprawnienia", + "Grant": "Przyznaj uprawnienia" - "Sign and publish": "Podpisz i opublikuj", - "Restart ZeroNet client?": "Uruchomić ponownie klienta ZeroNet?", - "Restart": "Uruchom ponownie" } diff --git a/src/Translate/languages/pt-br.json b/src/Translate/languages/pt-br.json index a842684f..d0aaf541 100644 --- a/src/Translate/languages/pt-br.json +++ b/src/Translate/languages/pt-br.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "Parabéns, a porta{0} está aberta.<br>
    Você é um membro completo da rede ZeroNet!", + "Congratulation, your port {0} is opened.<br>
    You are full member of ZeroNet network!": "Parabéns, a porta{0} está aberta.<br>
    Você é um membro completo da rede ZeroNet!", "Tor mode active, every connection using Onion route.": "Modo Tor ativado, todas as conexões usam a rota Onion.", "Successfully started Tor onion hidden services.": "Os serviços ocultos Tor onion foram inciados com sucesso.", "Unable to start hidden services, please check your config.": "Não foi possível iniciar os serviços ocultos, por favor verifique suas configurações.", "For faster connections open {0} port on your router.": "Para conexões mais rápidas, abra a porta {0} em seu roteador.", "Your connection is restricted. Please, open {0} port on your router": "Sua conexão está restrita. Por favor, abra a porta {0} em seu roteador", - "or configure Tor to become a full member of the ZeroNet network.": "ou configure o Tor para se tornar um membro completo da rede ZeroNet.", + "or configure Tor to become full member of ZeroNet network.": "ou configure o Tor para se tornar um membro completo da rede ZeroNet.", "Select account you want to use in this site:": "Selecione a conta que deseja usar nesse site:", "currently selected": "atualmente selecionada", @@ -13,8 +13,8 @@ "Content signing failed": "Assinatura de conteúdo falhou", "Content publish queued for {0:.0f} seconds.": "Publicação de conteúdo na fila por {0:.0f} segundos.", - "Content published to {0} peers.": "Conteúdo publicado para {0} peers.", - "No peers found, but your content is ready to access.": "Nenhum peer encontrado, mas seu conteúdo está pronto para ser acessado.", + "Content published to {0} peers.": "Conteúdo publicado para {0} pares.", + "No peers found, but your content is ready to access.": "Nenhum par encontrado, mas seu conteúdo está pronto para ser acessado.", "Your network connection is restricted. Please, open {0} port": "Sua conexão de rede está restrita. Por favor, abra a porta {0}", "on your router to make your site accessible for everyone.": "em seu roteador para tornar seu site acessível para todos.", "Content publish failed.": "Publicação de conteúdo falhou.", @@ -39,19 +39,13 @@ " files needs to be downloaded": " os arquivos precisam ser baixados", " downloaded": " baixados", " download failed": " falha no download", - "Peers found: ": "Peers encontrados: ", - "No peers found": "Nenhum peer encontrado", + "Peers found: ": "Pares encontrados: ", + "No peers found": "Nenhum par encontrado", "Running out of size limit (": "Passando do tamanho limite (", "Set limit to \" + site_info.next_size_limit + \"MB": "Definir limite para \" + site_info.next_size_limit + \"MB", "Site size limit changed to {0}MB": "Limite de tamanho do site alterado para {0}MBs", " New version of this page has just released.
    <br>Reload to see the modified content.": " Uma nova versão desse site acaba de ser publicada.<br>
    Atualize para ver o conteúdo modificado.", "This site requests permission:": "Esse site solicita permissão:", - "_(Accept)": "Conceder", - - "Save": "Salvar", - "Trackers announcing": "Trackers anunciando", - "Error": "Erro", - "Done": "Concluído", - "Tracker connection error detected.": "Erro de conexão com tracker foi detectado." + "Grant": "Conceder" } diff --git a/src/Translate/languages/ru.json b/src/Translate/languages/ru.json index 96c84b91..48e29e77 100644 --- a/src/Translate/languages/ru.json +++ b/src/Translate/languages/ru.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "Поздравляем, ваш порт {0} открыт.<br>
    Вы полноценный участник сети ZeroNet!", - "Tor mode active, every connection using Onion route.": "Режим Tor включен, все соединения осуществляются через Tor.", + "Congratulation, your port {0} is opened.<br>
    You are full member of ZeroNet network!": "Поздравляем, ваш пор {0} открыт.<br>
    Вы полноценный участник сети ZeroNet!", + "Tor mode active, every connection using Onion route.": "Режима Tor включен, все соединения осуществляются через Tor.", "Successfully started Tor onion hidden services.": "Скрытый сервис Tor запущено успешно.", "Unable to start hidden services, please check your config.": "Ошибка при запуске скрытого сервиса, пожалуйста проверьте настройки", "For faster connections open {0} port on your router.": "Для более быстрой работы сети откройте {0} порт на вашем роутере.", "Your connection is restricted. Please, open {0} port on your router": "Подключение ограничено. Пожалуйста откройте {0} порт на вашем роутере", - "or configure Tor to become a full member of the ZeroNet network.": "или настройте Tor что бы стать полноценным участником сети ZeroNet.", + "or configure Tor to become full member of ZeroNet network.": "или настройте Tor что бы стать полноценным участником сети ZeroNet.", "Select account you want to use in this site:": "Выберите аккаунт для использования на этом сайте:", "currently selected": "сейчас выбран", @@ -46,6 +46,6 @@ "Site size limit changed to {0}MB": "Лимит памяти на диске изменен на {0}MB", " New version of this page has just released.
    <br>Reload to see the modified content.": "Доступна новая версия данной страницы<br>
    Обновите страницу, что бы увидеть изменения!", "This site requests permission:": "Данный сайт запрашивает разрешения:", - "_(Accept)": "Предоставить" + "Grant": "Предоставить" } diff --git a/src/Translate/languages/sk.json b/src/Translate/languages/sk.json deleted file mode 100644 index 8fb4554b..00000000 --- a/src/Translate/languages/sk.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "Blahoželáme, váš port {0} je otvorený.<br>
    Ste úplným členom siete ZeroNet!", - "Tor mode active, every connection using Onion route.": "Tor mód aktívny, všetky spojenia teraz používajú Onion sieť.", - "Successfully started Tor onion hidden services.": "Tor úspešne spustený.", - "Unable to start hidden services, please check your config.": "Nebolo možné spustiť Tor, prosím skontrolujte nastavenia.", - "For faster connections open {0} port on your router.": "Pre rýchlejšie spojenie otvorte na vašom routery port {0}", - "Your connection is restricted. Please, open {0} port on your router": "Vaše pripojenie je obmedzené. Prosím otvorte port {0} na vašom routery.", - "or configure Tor to become a full member of the ZeroNet network.": "alebo nastavte Tor aby ste sa tali plným členom siete ZeroNet.", - - "Select account you want to use in this site:": "Zvoľte účet ktorý chcete používať na tejto stránke:", - "currently selected": "aktuálne zvolené", - "Unique to site": "Unikátny pre stránku", - - "Content signing failed": "Podpísanie obsahu zlyhalo", - "Content publish queued for {0:.0f} seconds.": "Podpísanie obsahu bude na rade za {0:.0f} sekúnd", - "Content published to {0} peers.": "Obsah publikovaný {0} peer-erom", - "No peers found, but your content is ready to access.": "Neboli nájdený žiadny peer-ery, ale váš obsah je pripravený pre prístup.", - "Your network connection is restricted. Please, open {0} port": "Vaše pripojenie k sieti je obmedzené. Prosím otvorte port {0} na vašom routery.", - "on your router to make your site accessible for everyone.": "na vašom routery aby bola vaša stránka prístupná pre všetkých.", - "Content publish failed.": "Publikovanie obsahu zlyhalo.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Tento súbor sa stále synchronizuje, ak v ňom spravíte zmeny, predchádzajúci obsah sa môže stratiť.", - "Write content anyway": "Aj tak spraviť zmeny", - "New certificate added:": "Pridaný nový certifikát:", - "You current certificate:": "Váš aktuálny certifikát:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Zvoľte to na {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certifikát zmenený na: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Stránka naklonovaná", - - "You have successfully changed the web interface's language!": "Úspešne ste zmenili jazyk webového rozhrania!", - "Due to the browser's caching, the full transformation could take some minute.": "Kôli cachu webového prehliadavača, ceľková transformácia môže chvíĺu trvať.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Spojenie s UiServer Websocket bolo stratené. 
Znovu pripájame...", - "Connection with UiServer Websocket recovered.": "Spojenie s UiServer Websocket obnovené.", - "UiServer Websocket error, please reload the page.": "Chyba UiServer Websocket-u, prosím znovu načítajte stránku.", - "   Connecting...": "   Pripájanie...", - "Site size: ": "Veľkosť stránky: ", - "MB is larger than default allowed ": "MB je viac ako povolená hodnota", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otvoriť stránku a nastaviť limit veľkosti na \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " súbory je potrebné stiahnuť", - " downloaded": " stiahnuté", - " download failed": " sťahovanie zlyhalo", - "Peers found: ": "Peer-erov nájdených: ", - "No peers found": "Neboli nájdený žiadny peer-ery", - "Running out of size limit (": "Presahuje povolený limit veľkosti pamäte (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Nastaviť limit na \" + site_info.next_size_limit + \"MB ändern", - "Site size limit changed to {0}MB": "Limit veľkosti pamäte nastavený na {0}MB", - " New version of this page has just released.
    <br>Reload to see the modified content.": " Bola vydaná nová verzia tejto stránky.<br>
    Znovu načítajte túto stránku aby bolo vidieť zmeny.", - "This site requests permission:": "Táto stránka vyžaduje povolenie:", - "_(Accept)": "Udeliť", - - "on": "", - "Oct": "Okt", - "May": "Máj", - "Jun": "Jún", - "Jul": "Júl" - -} diff --git a/src/Translate/languages/sl.json b/src/Translate/languages/sl.json deleted file mode 100644 index 2aeb628e..00000000 --- a/src/Translate/languages/sl.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "Čestitke, vaša vrata {0} so odprta.<br>
    Postali ste polnopravni član ZeroNet omrežja!", - "Tor mode active, every connection using Onion route.": "Način Tor aktiven.", - "Successfully started Tor onion hidden services.": "Storitve Tor uspešno zagnane.", - "Unable to start hidden services, please check your config.": "Ni bilo mogoče zagnati Tor storitev. Preverite nastavitve.", - "For faster connections open {0} port on your router.": "Za hitrejše povezave na svojem usmerjevalniku odprite vrata {0}.", - "Your connection is restricted. Please, open {0} port on your router": "Vaša povezava je omejena. Na svojem usmerjevalniku odprite vrata {0}", - "or configure Tor to become a full member of the ZeroNet network.": "ali nastavite Tor, da postanete polnopravni član ZeroNet omrežja.", - - "Select account you want to use in this site:": "Izberite račun, ki ga želite uporabiti na tem spletnem mestu:", - "currently selected": "trenutno izbrano", - "Unique to site": "Edinstven za spletno mesto", - - "Content signing failed": "Podpisovanje vsebine ni uspelo", - "Content publish queued for {0:.0f} seconds.": "Objava vsebine na čakanju za {0:.0f} sekund.", - "Content published to {0} peers.": "Vsebina objavljena na {0} povezavah.", - "No peers found, but your content is ready to access.": "Ni nobenih povezav, vendar je vaša vsebina pripravljena za dostop.", - "Your network connection is restricted. Please, open {0} port": "Vaša povezava je omejena. Prosimo, odprite vrata {0}", - "on your router to make your site accessible for everyone.": "na vašem usmerjevalniku, da bo vaše spletno mesto dostopno za vse.", - "Content publish failed.": "Objavljanje vsebine ni uspelo.", - "This file still in sync, if you write it now, then the previous content may be lost.": "Ta datoteka se še vedno sinhronizira. Če jo uredite zdaj, se lahko zgodi, da bo prejšnja vsebina izgubljena.", - "Write content anyway": "Vseeno uredi vsebino", - "New certificate added:": "Dodano novo potrdilo:", - "You current certificate:": "Trenutno potrdilo:", - "Change it to {auth_type}/{auth_user_name}@{domain}": "Spremenite ga na {auth_type}/{auth_user_name}@{domain}", - "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Potrdilo spremenjeno na: {auth_type}/{auth_user_name}@{domain}.", - "Site cloned": "Stran klonirana", - - "You have successfully changed the web interface's language!": "Uspešno ste spremenili jezik spletnega vmesnika!", - "Due to the browser's caching, the full transformation could take some minute.": "Zaradi predpomnjenja brskalnika lahko popolna preobrazba traja nekaj minut.", - - "Connection with UiServer Websocket was lost. Reconnecting...": "Povezava z UiServer Websocket je bila izgubljena. Ponovno povezovanje ...", - "Connection with UiServer Websocket recovered.": "Povezava z UiServer Websocket je vzpostavljena.", - "UiServer Websocket error, please reload the page.": "Napaka UiServer Websocket. 
Prosimo osvežite stran.", - "   Connecting...": "   Povezovanje ...", - "Site size: ": "Velikost strani: ", - "MB is larger than default allowed ": "MB je večja od dovoljenih", - "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Odpri to stran in nastavi omejitev na \" + site_info.next_size_limit + \"MB", - " files needs to be downloaded": " datotek mora biti prenešenih", - " downloaded": " preneseno", - " download failed": " prenos ni uspel", - "Peers found: ": "Najdene povezave: ", - "No peers found": "Ni najdenih povezav", - "Running out of size limit (": "Zmanjkuje dovoljenega prostora (", - "Set limit to \" + site_info.next_size_limit + \"MB": "Nastavi omejitev na \" + site_info.next_size_limit + \"MB", - "Site size limit changed to {0}MB": "Omejitev strani nastavljena na{0} MB", - " New version of this page has just released.
    <br>Reload to see the modified content.": " Ravnokar je bila objavljena nova različica te strani.<br>
    Osvežite jo, da boste videli novo vsebino.", - "This site requests permission:": "Ta stran zahteva dovoljenja:", - "_(Accept)": "Dovoli" - -} diff --git a/src/Translate/languages/tr.json b/src/Translate/languages/tr.json index 09a1bdb5..0bdabd89 100644 --- a/src/Translate/languages/tr.json +++ b/src/Translate/languages/tr.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "Tebrikler, portunuz ({0}) açık.<br>
    Artık ZeroNet ağına katıldınız!", + "Congratulation, your port {0} is opened.<br>
    You are full member of ZeroNet network!": "Tebrikler, portunuz ({0}) açık.<br>
    Artık ZeroNet ağına katıldınız!", "Tor mode active, every connection using Onion route.": "Tor aktif, tüm bağlantılar Onion yönlendircisini kullanıyor.", "Successfully started Tor onion hidden services.": "Gizli Tor hizmetleri başlatıldı.", "Unable to start hidden services, please check your config.": "Gizli hizmetler başlatılamadı, lütfen ayarlarınızı kontrol ediniz.", "For faster connections open {0} port on your router.": "Daha hızlı bağlantı için {0} nolu portu bilgisayarınıza yönlendirin.", "Your connection is restricted. Please, open {0} port on your router": "Sınırlı bağlantı. Lütfen, {0} nolu portu bilgisayarınıza yönlendirin", - "or configure Tor to become a full member of the ZeroNet network.": "ya da ZeroNet ağına tam olarak katılabilmek için Tor'u kullanın.", + "or configure Tor to become full member of ZeroNet network.": "ya da ZeroNet ağına tam olarak katılabilmek için Tor'u kullanın.", "Select account you want to use in this site:": "Bu sitede kullanmak için bir hesap seçiniz:", "currently selected": "kullanılan", @@ -46,6 +46,6 @@ "Site size limit changed to {0}MB": "Site boyut sınırlaması {0}MB olarak ayarlandı", " New version of this page has just released.
    <br>Reload to see the modified content.": " Bu sayfanın yeni versiyonu yayımlandı.<br>
    Değişen içeriği görmek için yeniden yükleyiniz.", "This site requests permission:": "Bu site bir izin istiyor:", - "_(Accept)": "İzin ver" + "Grant": "İzin ver" } diff --git a/src/Translate/languages/zh-tw.json b/src/Translate/languages/zh-tw.json index 0ec071b4..a30dd023 100644 --- a/src/Translate/languages/zh-tw.json +++ b/src/Translate/languages/zh-tw.json @@ -1,11 +1,11 @@ { - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "祝賀,你的埠 ({0}) 已經打開。<br>
    你已經是 ZeroNet 網路的正式成員了!", + "Congratulation, your port {0} is opened.<br>
    You are full member of ZeroNet network!": "祝賀,你的埠 ({0}) 已經打開。<br>
    你已經是 ZeroNet 網路的正式成員了!", "Tor mode active, every connection using Onion route.": "Tor 模式啟用,每個連接正在使用洋蔥路由。", "Successfully started Tor onion hidden services.": "成功啟動 Tor 洋蔥隱藏服務。", "Unable to start hidden services, please check your config.": "無法打開隱藏服務,請檢查你的配置。", "For faster connections open {0} port on your router.": "為了更快的連接請在路由器上打開 {0} 埠。", "Your connection is restricted. Please, open {0} port on your router": "你的連接受限制。請在你的路由器上打開 {0} 埠", - "or configure Tor to become a full member of the ZeroNet network.": "或者配置你的 Tor 來成為 ZeroNet 的正式成員。", + "or configure Tor to become full member of ZeroNet network.": "或者配置你的 Tor 來成為 ZeroNet 的正式成員。", "Select account you want to use in this site:": "選擇你要在這個網站使用的帳戶:", "currently selected": "當前選擇", @@ -49,6 +49,6 @@ "Site size limit changed to {0}MB": "網站大小限制已改變到 {0}MB", " New version of this page has just released.
    <br>Reload to see the modified content.": " 本頁面的新版本已經發佈。<br>
    重新載入來查看更改後的內容。", "This site requests permission:": "這個網站的請求許可權:", - "_(Accept)": "授權" + "Grant": "授權" } diff --git a/src/Translate/languages/zh.json b/src/Translate/languages/zh.json index 16a40b1a..e0b1232f 100644 --- a/src/Translate/languages/zh.json +++ b/src/Translate/languages/zh.json @@ -1,14 +1,13 @@ { - "Congratulations, your port {0} is opened.
    <br>You are a full member of the ZeroNet network!": "祝贺,您的端口 ({0}) 已经打开。<br>
    您已经是 ZeroNet 网络的正式成员了!", + "Congratulation, your port {0} is opened.<br>
    You are full member of ZeroNet network!": "祝贺,你的端口 ({0}) 已经打开。<br>
    你已经是 ZeroNet 网络的正式成员了!", "Tor mode active, every connection using Onion route.": "Tor 模式启用,每个连接正在使用洋葱路由。", "Successfully started Tor onion hidden services.": "成功启动 Tor 洋葱隐藏服务。", - "Unable to start hidden services, please check your config.": "无法打开隐藏服务,请检查您的配置。", + "Unable to start hidden services, please check your config.": "无法打开隐藏服务,请检查你的配置。", "For faster connections open {0} port on your router.": "为了更快的连接请在路由器上打开 {0} 端口。", - "Your connection is restricted. Please, open {0} port on your router": "您的连接受限制。请在您的路由器上打开 {0} 端口", - "or configure Tor to become a full member of the ZeroNet network.": "或者配置您的 Tor 来成为 ZeroNet 的正式成员。", + "Your connection is restricted. Please, open {0} port on your router": "你的连接受限制。请在你的路由器上打开 {0} 端口", + "or configure Tor to become full member of ZeroNet network.": "或者配置你的 Tor 来成为 ZeroNet 的正式成员。", - "Select account you want to use in this site:": "选择您要在这个网站使用的帐户:", - "No certificate": "没有证书", + "Select account you want to use in this site:": "选择你要在这个网站使用的帐户:", "currently selected": "当前选择", "Unique to site": "网站独有身份", @@ -16,20 +15,20 @@ "Content publish queued for {0:.0f} seconds.": "内容已加入 {0:.0f} 秒后的发布队列。", "Content published to {0}/{1} peers.": "内容已发布到 {0}/{1} 个节点。", "Content published to {0} peers.": "内容已发布到 {0} 个节点。", - "No peers found, but your content is ready to access.": "找不到节点,但是您的内容已经准备好被访问。", - "Your network connection is restricted. Please, open {0} port": "您的网络连接受限制。请在您的路由器上打开 {0} 端口", - "on your router to make your site accessible for everyone.": "确保您的站点能被每一个人访问。", + "No peers found, but your content is ready to access.": "找不到节点,但是你的内容已经准备好被访问。", + "Your network connection is restricted. Please, open {0} port": "你的网络连接受限制。请在你的路由器上打开 {0} 端口", + "on your router to make your site accessible for everyone.": "确保你的站点能被每一个人访问。", "Content publish failed.": "内容发布失败。", - "This file still in sync, if you write it now, then the previous content may be lost.": "这个文件仍然在同步中,如果您现在写入它,之前的内容可能会被丢失。", + "This file still in sync, if you write it now, then the previous content may be lost.": "这个文件仍然在同步中,如果你现在写入它,之前的内容可能会被丢失。", "Write content anyway": "强制写入内容", "New certificate added:": "新证书:", - "You current certificate:": "您当前的证书:", + "You current certificate:": "你当前的证书:", "Change it to {auth_type}/{auth_user_name}@{domain}": "更改至 {auth_type}/{auth_user_name}@{domain}-ra", "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "证书更改至:{auth_type}/{auth_user_name}@{domain}。", "Site cloned": "站点已克隆", - "You have successfully changed the web interface's language!": "您已经成功更改了 web 界面的语言!", - "Due to the browser's caching, the full transformation could take some minute.": "由于您的浏览器缓存,完整的翻译可能需要花几分钟。", + "You have successfully changed the web interface's language!": "你已经成功更改了 web 界面的语言!", + "Due to the browser's caching, the full transformation could take some minute.": "由于你的浏览器缓存,完整的翻译可能需要花几分钟。", "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket 的连接已丢失。重新连接中...", "Connection with UiServer Websocket recovered.": "UiServer Websocket 的连接已恢复。", @@ -50,6 +49,6 @@ "Site size limit changed to {0}MB": "站点大小限制已更改到 {0}MB", " New version of this page has just released.
    <br>Reload to see the modified content.": " 本页面的新版本已经发布。<br>
    重新加载来查看更改后的内容。", "This site requests permission:": "这个站点的请求权限:", - "_(Accept)": "授权" + "Grant": "授权" } diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index 4a4e0545..44fbd6af 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -3,11 +3,7 @@ import re import os import mimetypes import json -import html -import urllib -import socket - -import gevent +import cgi from Config import config from Site import SiteManager @@ -15,7 +11,6 @@ from User import UserManager from Plugin import PluginManager from Ui.UiWebsocket import UiWebsocket from Crypt import CryptHash -from util import helper status_texts = { 200: "200 OK", @@ -26,27 +21,6 @@ status_texts = { 500: "500 Internal Server Error", } -content_types = { - "asc": "application/pgp-keys", - "css": "text/css", - "gpg": "application/pgp-encrypted", - "html": "text/html", - "js": "application/javascript", - "json": "application/json", - "oga": "audio/ogg", - "ogg": "application/ogg", - "ogv": "video/ogg", - "sig": "application/pgp-signature", - "txt": "text/plain", - "webmanifest": "application/manifest+json", - "wasm": "application/wasm", - "webp": "image/webp" -} - - -class SecurityError(Exception): - pass - @PluginManager.acceptPlugins class UiRequest(object): @@ -65,99 +39,27 @@ class UiRequest(object): self.start_response = start_response # Start response function self.user = None - self.script_nonce = None # Nonce for script tags in wrapper html - - def learnHost(self, host): - self.server.allowed_hosts.add(host) - self.server.log.info("Added %s as allowed host" % host) - - def isHostAllowed(self, host): - if host in self.server.allowed_hosts: - return True - - # Allow any IP address as they are not affected by DNS rebinding - # attacks - if helper.isIp(host): - self.learnHost(host) - return True - - if ":" in host and helper.isIp(host.rsplit(":", 1)[0]): # Test without port - self.learnHost(host) - return True - - if self.isProxyRequest(): # Support for chrome extension proxy - if self.isDomain(host): - return True - else: - return False - - return False - - def isDomain(self, address): - return self.server.site_manager.isDomainCached(address) - - def resolveDomain(self, domain): - return self.server.site_manager.resolveDomainCached(domain) # Call the request handler function base on path def route(self, path): - # Restict Ui access by ip - if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: + if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: # Restict Ui access by ip return self.error403(details=False) - # Check if host allowed to do request - if not self.isHostAllowed(self.env.get("HTTP_HOST")): - ret_error = next(self.error403("Invalid host: %s" % self.env.get("HTTP_HOST"), details=False)) - - http_get = self.env["PATH_INFO"] - if self.env["QUERY_STRING"]: - http_get += "?{0}".format(self.env["QUERY_STRING"]) - self_host = self.env["HTTP_HOST"].split(":")[0] - self_ip = self.env["HTTP_HOST"].replace(self_host, socket.gethostbyname(self_host)) - link = "http://{0}{1}".format(self_ip, http_get) - ret_body = """ -

    <h4>Start the client with <code>--ui_host "{host}"</code> argument</h4>
-                <h4>or access via ip: <a href="{link}">{link}</a></h4>
    - """.format( - host=html.escape(self.env["HTTP_HOST"]), - link=html.escape(link) - ).encode("utf8") - return iter([ret_error, ret_body]) - - # Prepend .bit host for transparent proxy - if self.isDomain(self.env.get("HTTP_HOST")): - path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path) path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access - # Sanitize request url - path = path.replace("\\", "/") - if "../" in path or "./" in path: - return self.error403("Invalid path: %s" % path) - if self.env["REQUEST_METHOD"] == "OPTIONS": if "/" not in path.strip("/"): content_type = self.getContentType("index.html") else: content_type = self.getContentType(path) - - extra_headers = {"Access-Control-Allow-Origin": "null"} - - self.sendHeader(content_type=content_type, extra_headers=extra_headers, noscript=True) + self.sendHeader(content_type=content_type) return "" if path == "/": return self.actionIndex() - elif path in ("/favicon.ico", "/apple-touch-icon.png"): - return self.actionFile("src/Ui/media/img/%s" % path) - # Internal functions - elif "/ZeroNet-Internal/" in path: - path = re.sub(".*?/ZeroNet-Internal/", "/", path) - func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function - if func: - return func() - else: - return self.error404(path) + elif path == "/favicon.ico": + return self.actionFile("src/Ui/media/img/favicon.ico") # Media elif path.startswith("/uimedia/"): return self.actionUiMedia(path) @@ -173,36 +75,24 @@ class UiRequest(object): return self.actionDebug() elif path == "/Console" and config.debug: return self.actionConsole() - # Wrapper-less static files - elif path.startswith("/raw/"): - return self.actionSiteMedia(path.replace("/raw", "/media", 1), header_noscript=True) - - elif path.startswith("/add/"): - return self.actionSiteAdd() # Site media wrapper else: if self.get.get("wrapper_nonce"): - if self.get["wrapper_nonce"] in self.server.wrapper_nonces: - self.server.wrapper_nonces.remove(self.get["wrapper_nonce"]) - return self.actionSiteMedia("/media" + path) # Only serve html files with frame - else: - self.server.log.warning("Invalid wrapper nonce: %s" % self.get["wrapper_nonce"]) - body = self.actionWrapper(path) + return self.actionSiteMedia("/media" + path) # Only serve html files with frame else: body = self.actionWrapper(path) if body: return body else: - func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function + func = getattr(self, "action" + path.lstrip("/"), None) # Check if we have action+request_path function if func: return func() else: - ret = self.error404(path) - return ret + return self.error404(path) - # The request is proxied by chrome extension or a transparent proxy + # The request is proxied by chrome extension def isProxyRequest(self): - return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.isDomain(self.env.get("HTTP_HOST"))) + return self.env["PATH_INFO"].startswith("http://") def isWebSocketRequest(self): return self.env.get("HTTP_UPGRADE") == "websocket" @@ -212,25 +102,22 @@ class UiRequest(object): # Get mime by filename def getContentType(self, file_name): - file_name = file_name.lower() - ext = file_name.rsplit(".", 1)[-1] + content_type = mimetypes.guess_type(file_name)[0] - if ext in content_types: - content_type = content_types[ext] - elif ext in ("ttf", "woff", 
"otf", "woff2", "eot", "sfnt", "collection"): - content_type = "font/%s" % ext - else: - content_type = mimetypes.guess_type(file_name)[0] + if file_name.endswith(".css"): # Force correct css content type + content_type = "text/css" if not content_type: - content_type = "application/octet-stream" - - return content_type.lower() + if file_name.endswith(".json"): # Correct json header + content_type = "application/json" + else: + content_type = "application/octet-stream" + return content_type # Return: Posted variables def getPosted(self): if self.env['REQUEST_METHOD'] == "POST": - return dict(urllib.parse.parse_qsl( + return dict(cgi.parse_qsl( self.env['wsgi.input'].readline().decode() )) else: @@ -240,7 +127,7 @@ class UiRequest(object): def getCookies(self): raw_cookies = self.env.get('HTTP_COOKIE') if raw_cookies: - cookies = urllib.parse.parse_qsl(raw_cookies) + cookies = cgi.parse_qsl(raw_cookies) return {key.strip(): val for key, val in cookies} else: return {} @@ -253,144 +140,66 @@ class UiRequest(object): self.user = UserManager.user_manager.create() return self.user - def getRequestUrl(self): - if self.isProxyRequest(): - if self.env["PATH_INFO"].startswith("http://zero/"): - return self.env["PATH_INFO"] - else: # Add http://zero to direct domain access - return self.env["PATH_INFO"].replace("http://", "http://zero/", 1) - else: - return self.env["wsgi.url_scheme"] + "://" + self.env["HTTP_HOST"] + self.env["PATH_INFO"] - - def getReferer(self): - referer = self.env.get("HTTP_REFERER") - if referer and self.isProxyRequest() and not referer.startswith("http://zero/"): - return referer.replace("http://", "http://zero/", 1) - else: - return referer - - def isScriptNonceSupported(self): - user_agent = self.env.get("HTTP_USER_AGENT") - if "Edge/" in user_agent: - is_script_nonce_supported = False - elif "Safari/" in user_agent and "Chrome/" not in user_agent: - is_script_nonce_supported = False - else: - is_script_nonce_supported = True - return is_script_nonce_supported - # Send response headers - def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]): - headers = {} - headers["Version"] = "HTTP/1.1" - headers["Connection"] = "Keep-Alive" - headers["Keep-Alive"] = "max=25, timeout=30" - headers["X-Frame-Options"] = "SAMEORIGIN" - if content_type != "text/html" and self.env.get("HTTP_REFERER") and self.isSameOrigin(self.getReferer(), self.getRequestUrl()): - headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css - - if noscript: - headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src *; font-src * data:; media-src *; style-src * 'unsafe-inline';" - elif script_nonce and self.isScriptNonceSupported(): - headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self' blob: data:; style-src 'self' blob: 'unsafe-inline'; connect-src *; frame-src 'self' blob:".format(script_nonce) - - if allow_ajax: - headers["Access-Control-Allow-Origin"] = "null" - + def sendHeader(self, status=200, content_type="text/html", extra_headers=[]): + headers = [] + headers.append(("Version", "HTTP/1.1")) + headers.append(("Connection", "Keep-Alive")) + headers.append(("Keep-Alive", "max=25, timeout=30")) + if content_type != "text/html": + headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access on non-html files + headers.append(("X-Frame-Options", "SAMEORIGIN")) + # headers.append(("Content-Security-Policy", 
"default-src 'self' data: 'unsafe-inline' ws://127.0.0.1:* http://127.0.0.1:* wss://tracker.webtorrent.io; sandbox allow-same-origin allow-top-navigation allow-scripts")) # Only local connections if self.env["REQUEST_METHOD"] == "OPTIONS": # Allow json access - headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range" - headers["Access-Control-Allow-Credentials"] = "true" + headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Cookie")) + headers.append(("Access-Control-Allow-Credentials", "true")) - # Download instead of display file types that can be dangerous - if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type): - headers["Content-Disposition"] = "attachment" + if content_type == "text/html": + content_type = "text/html; charset=utf-8" + if content_type == "text/plain": + content_type = "text/plain; charset=utf-8" cacheable_type = ( - self.env["REQUEST_METHOD"] == "OPTIONS" or - content_type.split("/", 1)[0] in ("image", "video", "font") or - content_type in ("application/javascript", "text/css") + content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or + self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript" ) - if content_type in ("text/plain", "text/html", "text/css", "application/javascript", "application/json", "application/manifest+json"): - content_type += "; charset=utf-8" - if status in (200, 206) and cacheable_type: # Cache Css, Js, Image files for 10min - headers["Cache-Control"] = "public, max-age=600" # Cache 10 min + headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min else: - headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all - headers["Content-Type"] = content_type - headers.update(extra_headers) - return self.start_response(status_texts[status], list(headers.items())) + headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all + headers.append(("Content-Type", content_type)) + for extra_header in extra_headers: + headers.append(extra_header) + return self.start_response(status_texts[status], headers) # Renders a template def render(self, template_path, *args, **kwargs): - template = open(template_path, encoding="utf8").read() - - def renderReplacer(m): - if m.group(1) in kwargs: - return "%s" % kwargs.get(m.group(1), "") - else: - return m.group(0) - - template_rendered = re.sub("{(.*?)}", renderReplacer, template) - - return template_rendered.encode("utf8") - - def isWrapperNecessary(self, path): - match = re.match(r"/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) - - if not match: - return True - - inner_path = match.group("inner_path").lstrip("/") - if not inner_path or path.endswith("/"): # It's a directory - content_type = self.getContentType("index.html") - else: # It's a file - content_type = self.getContentType(inner_path) - - is_html_file = "html" in content_type or "xhtml" in content_type - - return is_html_file - - @helper.encodeResponse - def formatRedirect(self, url): - return """ - - - Redirecting to {0} - - - - """.format(html.escape(url)) + template = open(template_path).read().decode("utf8") + return template.format(**kwargs).encode("utf8") # - Actions - # Redirect to an url def actionRedirect(self, url): - self.start_response('301 Redirect', [('Location', str(url))]) - yield self.formatRedirect(url) + self.start_response('301 Redirect', [('Location', url)]) + yield "Location changed: %s" % url def actionIndex(self): - return self.actionRedirect("/" + config.homepage + "/") + return self.actionRedirect("/" + config.homepage) # Render a file from media with iframe site wrapper def actionWrapper(self, path, extra_headers=None): if not extra_headers: - extra_headers = {} - script_nonce = self.getScriptNonce() + extra_headers = [] - match = re.match(r"/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) - just_added = False + match = re.match("/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) if match: address = match.group("address") inner_path = match.group("inner_path").lstrip("/") - - if not self.isWrapperNecessary(path): - return self.actionSiteMedia("/media" + path) # Serve non-html files without wrapper - + if "." in inner_path and not inner_path.endswith(".html"): + return self.actionSiteMedia("/media" + path) # Only serve html files with frame if self.isAjaxRequest(): return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper @@ -398,73 +207,32 @@ class UiRequest(object): return self.error403("WebSocket request not allowed to load wrapper") # No websocket if "text/html" not in self.env.get("HTTP_ACCEPT", ""): - return self.error403("Invalid Accept header to load wrapper: %s" % self.env.get("HTTP_ACCEPT", "")) + return self.error403("Invalid Accept header to load wrapper") if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""): return self.error403("Prefetch not allowed to load wrapper") site = SiteManager.site_manager.get(address) - if site and site.content_manager.contents.get("content.json"): + if ( + site and site.content_manager.contents.get("content.json") and + (not site.getReachableBadFiles() or site.settings["own"]) + ): # Its downloaded or own title = site.content_manager.contents["content.json"]["title"] else: title = "Loading %s..." % address - site = SiteManager.site_manager.get(address) - if site: # Already added, but not downloaded - if time.time() - site.announcer.time_last_announce > 5: - site.log.debug("Reannouncing site...") - gevent.spawn(site.update, announce=True) - else: # If not added yet - site = SiteManager.site_manager.need(address) - just_added = True + site = SiteManager.site_manager.need(address) # Start download site if not site: return False - self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) - - min_last_announce = (time.time() - site.announcer.time_last_announce) / 60 - if min_last_announce > 60 and site.isServing() and not just_added: - site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." 
% min_last_announce) - gevent.spawn(site.update, announce=True) - - return iter([self.renderWrapper(site, path, inner_path, title, extra_headers, script_nonce=script_nonce)]) - # Make response be sent at once (see https://github.com/HelloZeroNet/ZeroNet/issues/1092) + self.sendHeader(extra_headers=extra_headers[:]) + return iter([self.renderWrapper(site, path, inner_path, title, extra_headers)]) + # Dont know why wrapping with iter necessary, but without it around 100x slower else: # Bad url return False - def getSiteUrl(self, address): - if self.isProxyRequest(): - return "http://zero/" + address - else: - return "/" + address - - def getWsServerUrl(self): - if self.isProxyRequest(): - if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 - server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] - else: # Remote client, use SERVER_NAME as server's real address - server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) - else: - server_url = "" - return server_url - - def processQueryString(self, site, query_string): - match = re.search("zeronet_peers=(.*?)(&|$)", query_string) - if match: - query_string = query_string.replace(match.group(0), "") - num_added = 0 - for peer in match.group(1).split(","): - if not re.match(".*?:[0-9]+$", peer): - continue - ip, port = peer.rsplit(":", 1) - if site.addPeer(ip, int(port), source="query_string"): - num_added += 1 - site.log.debug("%s peers added by query string" % num_added) - - return query_string - - def renderWrapper(self, site, path, inner_path, title, extra_headers, show_loadingscreen=None, script_nonce=None): + def renderWrapper(self, site, path, inner_path, title, extra_headers): file_inner_path = inner_path if not file_inner_path: file_inner_path = "index.html" # If inner path defaults to index.html @@ -474,118 +242,76 @@ class UiRequest(object): address = re.sub("/.*", "", path.lstrip("/")) if self.isProxyRequest() and (not path or "/" in path[1:]): + file_url = re.sub(".*/", "", inner_path) if self.env["HTTP_HOST"] == "zero": root_url = "/" + address + "/" - file_url = "/" + address + "/" + inner_path else: - file_url = "/" + inner_path root_url = "/" else: file_url = "/" + address + "/" + inner_path root_url = "/" + address + "/" - if self.isProxyRequest(): - self.server.allowed_ws_origins.add(self.env["HTTP_HOST"]) - # Wrapper variable inits + query_string = "" body_style = "" meta_tags = "" postmessage_nonce_security = "false" wrapper_nonce = self.getWrapperNonce() - inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", "")) - if "?" in inner_path: - sep = "&" + if self.env.get("QUERY_STRING"): + query_string = "?%s&wrapper_nonce=%s" % (self.env["QUERY_STRING"], wrapper_nonce) else: - sep = "?" 
- - if inner_query_string: - inner_query_string = "%s%s&wrapper_nonce=%s" % (sep, inner_query_string, wrapper_nonce) - else: - inner_query_string = "%swrapper_nonce=%s" % (sep, wrapper_nonce) + query_string = "?wrapper_nonce=%s" % wrapper_nonce if self.isProxyRequest(): # Its a remote proxy request + if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 + server_url = "http://127.0.0.1:%s" % self.env["SERVER_PORT"] + else: # Remote client, use SERVER_NAME as server's real address + server_url = "http://%s:%s" % (self.env["SERVER_NAME"], self.env["SERVER_PORT"]) homepage = "http://zero/" + config.homepage else: # Use relative path + server_url = "" homepage = "/" + config.homepage - server_url = self.getWsServerUrl() # Real server url for WS connections - - user = self.getCurrentUser() - if user: - theme = user.settings.get("theme", "light") - else: - theme = "light" - - themeclass = "theme-%-6s" % re.sub("[^a-z]", "", theme) - if site.content_manager.contents.get("content.json"): # Got content.json content = site.content_manager.contents["content.json"] if content.get("background-color"): - background_color = content.get("background-color-%s" % theme, content["background-color"]) - body_style += "background-color: %s;" % html.escape(background_color) + body_style += "background-color: %s;" % \ + cgi.escape(site.content_manager.contents["content.json"]["background-color"], True) if content.get("viewport"): - meta_tags += '' % html.escape(content["viewport"]) + meta_tags += '' % cgi.escape(content["viewport"], True) if content.get("favicon"): - meta_tags += '' % (root_url, html.escape(content["favicon"])) + meta_tags += '' % (root_url, cgi.escape(content["favicon"], True)) if content.get("postmessage_nonce_security"): postmessage_nonce_security = "true" - sandbox_permissions = "" - - if "NOSANDBOX" in site.settings["permissions"]: - sandbox_permissions += " allow-same-origin" - - if show_loadingscreen is None: - show_loadingscreen = not site.storage.isFile(file_inner_path) - - if show_loadingscreen: - meta_tags += ''; - - def xescape(s): - '''combines parts from re.escape & html.escape''' - # https://github.com/python/cpython/blob/3.10/Lib/re.py#L267 - # '&' is handled otherwise - re_chars = {i: '\\' + chr(i) for i in b'()[]{}*+-|^$\\.~# \t\n\r\v\f'} - # https://github.com/python/cpython/blob/3.10/Lib/html/__init__.py#L12 - html_chars = { - '<' : '<', - '>' : '>', - '"' : '"', - "'" : ''', - } - # we can't replace '&' because it makes certain zites work incorrectly - # it should however in no way interfere with re.sub in render - repl = {} - repl.update(re_chars) - repl.update(html_chars) - return s.translate(repl) + if site.settings.get("own"): + sandbox_permissions = "allow-modals" # For coffeescript compile errors + else: + sandbox_permissions = "" return self.render( "src/Ui/template/wrapper.html", server_url=server_url, inner_path=inner_path, - file_url=xescape(file_url), - file_inner_path=xescape(file_inner_path), + file_url=re.escape(file_url), + file_inner_path=re.escape(file_inner_path), address=site.address, - title=xescape(title), + title=cgi.escape(title, True), body_style=body_style, meta_tags=meta_tags, - query_string=xescape(inner_query_string), + query_string=re.escape(query_string), wrapper_key=site.settings["wrapper_key"], - ajax_key=site.settings["ajax_key"], wrapper_nonce=wrapper_nonce, postmessage_nonce_security=postmessage_nonce_security, permissions=json.dumps(site.settings["permissions"]), - 
show_loadingscreen=json.dumps(show_loadingscreen), + show_loadingscreen=json.dumps(not site.storage.isFile(file_inner_path)), sandbox_permissions=sandbox_permissions, rev=config.rev, lang=config.language, - homepage=homepage, - themeclass=themeclass, - script_nonce=script_nonce + homepage=homepage ) # Create a new wrapper nonce that allows to get one html file without the wrapper @@ -594,200 +320,119 @@ class UiRequest(object): self.server.wrapper_nonces.append(wrapper_nonce) return wrapper_nonce - def getScriptNonce(self): - if not self.script_nonce: - self.script_nonce = CryptHash.random(encoding="base64") - - return self.script_nonce - - # Create a new wrapper nonce that allows to get one site - def getAddNonce(self): - add_nonce = CryptHash.random() - self.server.add_nonces.append(add_nonce) - return add_nonce - - def isSameOrigin(self, url_a, url_b): - if not url_a or not url_b: + # Returns if media request allowed from that referer + def isMediaRequestAllowed(self, site_address, referer): + if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]): return False - - url_a = url_a.replace("/raw/", "/") - url_b = url_b.replace("/raw/", "/") - - origin_pattern = "http[s]{0,1}://(.*?/.*?/).*" - is_origin_full = re.match(origin_pattern, url_a) - if not is_origin_full: # Origin looks trimmed to host, require only same host - origin_pattern = "http[s]{0,1}://(.*?/).*" - - origin_a = re.sub(origin_pattern, "\\1", url_a) - origin_b = re.sub(origin_pattern, "\\1", url_b) - - return origin_a == origin_b + referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address + return referer_path.startswith("/" + site_address) # Return {address: 1Site.., inner_path: /data/users.json} from url path def parsePath(self, path): - path = path.replace("\\", "/") path = path.replace("/index.html/", "/") # Base Backward compatibility fix if path.endswith("/"): path = path + "index.html" - if "../" in path or "./" in path: - raise SecurityError("Invalid path") + if ".." in path: + raise Exception("Invalid path") - match = re.match(r"/media/(?P
<address>[A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path) + match = re.match("/media/(?P<address>
    [A-Za-z0-9\._-]+)/(?P.*)", path) if match: path_parts = match.groupdict() - if self.isDomain(path_parts["address"]): - path_parts["address"] = self.resolveDomain(path_parts["address"]) path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites) - path_parts["inner_path"] = path_parts["inner_path"].lstrip("/") - if not path_parts["inner_path"]: - path_parts["inner_path"] = "index.html" return path_parts else: return None # Serve a media for site - def actionSiteMedia(self, path, header_length=True, header_noscript=False): - try: - path_parts = self.parsePath(path) - except SecurityError as err: - return self.error403(err) + def actionSiteMedia(self, path, header_length=True): + if ".." in path: # File not in allowed path + return self.error403("Invalid file path") - if not path_parts: + path_parts = self.parsePath(path) + + # Check wrapper nonce + content_type = self.getContentType(path_parts["inner_path"]) + if "htm" in content_type: # Valid nonce must present to render html files + wrapper_nonce = self.get.get("wrapper_nonce") + if wrapper_nonce not in self.server.wrapper_nonces: + return self.error403("Wrapper nonce error. Please reload the page.") + self.server.wrapper_nonces.remove(self.get["wrapper_nonce"]) + + referer = self.env.get("HTTP_REFERER") + if referer and path_parts: # Only allow same site to receive media + if not self.isMediaRequestAllowed(path_parts["request_address"], referer): + self.log.error("Media referrer error: %s not allowed from %s" % (path_parts["address"], referer)) + return self.error403("Media referrer error") # Referrer not starts same address as requested path + + if path_parts: # Looks like a valid path + address = path_parts["address"] + file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) + if config.debug and file_path.split("/")[-1].startswith("all."): + # If debugging merge *.css to all.css and *.js to all.js + site = self.server.sites.get(address) + if site.settings["own"]: + from Debug import DebugMedia + DebugMedia.merge(file_path) + if not address or address == ".": + return self.error403(path_parts["inner_path"]) + if os.path.isfile(file_path): # File exists + return self.actionFile(file_path, header_length=header_length) + elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect + return self.actionRedirect("./{0}/".format(path_parts["inner_path"].split("/")[-1])) + else: # File not exists, try to download + if address not in SiteManager.site_manager.sites: # Only in case if site already started downloading + return self.error404(path_parts["inner_path"]) + + site = SiteManager.site_manager.need(address) + + if path_parts["inner_path"].endswith("favicon.ico"): # Default favicon for all sites + return self.actionFile("src/Ui/media/img/favicon.ico") + + result = site.needFile(path_parts["inner_path"], priority=5) # Wait until file downloads + if result: + return self.actionFile(file_path, header_length=header_length) + else: + self.log.debug("File not found: %s" % path_parts["inner_path"]) + # Site larger than allowed, re-add wrapper nonce to allow reload + if site.settings.get("size", 0) > site.getSizeLimit() * 1024 * 1024: + self.server.wrapper_nonces.append(self.get.get("wrapper_nonce")) + return self.error404(path_parts["inner_path"]) + + else: # Bad url return self.error404(path) - address = path_parts["address"] - - file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) - - if (config.debug or config.merge_media) and 
file_path.split("/")[-1].startswith("all."): - # If debugging merge *.css to all.css and *.js to all.js - site = self.server.sites.get(address) - if site and site.settings["own"]: - from Debug import DebugMedia - DebugMedia.merge(file_path) - - if not address or address == ".": - return self.error403(path_parts["inner_path"]) - - header_allow_ajax = False - if self.get.get("ajax_key"): - site = SiteManager.site_manager.get(path_parts["request_address"]) - if self.get["ajax_key"] == site.settings["ajax_key"]: - header_allow_ajax = True - else: - return self.error403("Invalid ajax_key") - - file_size = helper.getFilesize(file_path) - - if file_size is not None: - return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) - - elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect - if path_parts["inner_path"]: - return self.actionRedirect("./%s/" % path_parts["inner_path"].split("/")[-1]) - else: - return self.actionRedirect("./%s/" % path_parts["address"]) - - else: # File not exists, try to download - if address not in SiteManager.site_manager.sites: # Only in case if site already started downloading - return self.actionSiteAddPrompt(path) - - site = SiteManager.site_manager.need(address) - - if path_parts["inner_path"].endswith("favicon.ico"): # Default favicon for all sites - return self.actionFile("src/Ui/media/img/favicon.ico") - - result = site.needFile(path_parts["inner_path"], priority=15) # Wait until file downloads - if result: - file_size = helper.getFilesize(file_path) - return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) - else: - self.log.debug("File not found: %s" % path_parts["inner_path"]) - return self.error404(path) - # Serve a media for ui def actionUiMedia(self, path): match = re.match("/uimedia/(?P.*)", path) if match: # Looks like a valid path file_path = "src/Ui/media/%s" % match.group("inner_path") allowed_dir = os.path.abspath("src/Ui/media") # Only files within data/sitehash allowed - if "../" in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): + if ".." 
in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path return self.error403() else: - if (config.debug or config.merge_media) and match.group("inner_path").startswith("all."): + if config.debug and match.group("inner_path").startswith("all."): # If debugging merge *.css to all.css and *.js to all.js from Debug import DebugMedia DebugMedia.merge(file_path) return self.actionFile(file_path, header_length=False) # Dont's send site to allow plugins append content - else: # Bad url return self.error400() - def actionSiteAdd(self): - post_data = self.env["wsgi.input"].read().decode() - post = dict(urllib.parse.parse_qsl(post_data)) - if post["add_nonce"] not in self.server.add_nonces: - return self.error403("Add nonce error.") - self.server.add_nonces.remove(post["add_nonce"]) - SiteManager.site_manager.need(post["address"]) - return self.actionRedirect(post["url"]) - - @helper.encodeResponse - def actionSiteAddPrompt(self, path): - path_parts = self.parsePath(path) - if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]): - return self.error404(path) - - self.sendHeader(200, "text/html", noscript=True) - template = open("src/Ui/template/site_add.html").read() - template = template.replace("{url}", html.escape(self.env["PATH_INFO"])) - template = template.replace("{address}", path_parts["address"]) - template = template.replace("{add_nonce}", self.getAddNonce()) - return template - - def replaceHtmlVariables(self, block, path_parts): - user = self.getCurrentUser() - if user and user.settings: - themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light")) - else: - themeclass = "theme-light" - block = block.replace(b"{themeclass}", themeclass.encode("utf8")) - - if path_parts: - site = self.server.sites.get(path_parts.get("address")) - if site.settings["own"]: - modified = int(time.time()) - else: - modified = int(site.content_manager.contents["content.json"]["modified"]) - block = block.replace(b"{site_modified}", str(modified).encode("utf8")) - - return block - # Stream a file to client - def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, extra_headers={}, file_size=None, file_obj=None, path_parts=None): - file_name = os.path.basename(file_path) - - if file_size is None: - file_size = helper.getFilesize(file_path) - - if file_size is not None: + def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True): + if os.path.isfile(file_path): # Try to figure out content type by extension - content_type = self.getContentType(file_name) + content_type = self.getContentType(file_path) + # TODO: Dont allow external access: extra_headers= + # [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")] range = self.env.get("HTTP_RANGE") range_start = None - - is_html_file = file_name.endswith(".html") - if is_html_file: - header_length = False - if send_header: - extra_headers = extra_headers.copy() - content_encoding = self.get.get("zeronet_content_encoding", "") - if all(part.strip() in ("gzip", "compress", "deflate", "identity", "br") for part in content_encoding.split(",")): - extra_headers["Content-Encoding"] = content_encoding + extra_headers = {} + file_size = os.path.getsize(file_path) extra_headers["Accept-Ranges"] = "bytes" if header_length: extra_headers["Content-Length"] = str(file_size) @@ -803,101 +448,75 @@ 
class UiRequest(object): status = 206 else: status = 200 - self.sendHeader(status, content_type=content_type, noscript=header_noscript, allow_ajax=header_allow_ajax, extra_headers=extra_headers) + self.sendHeader(status, content_type=content_type, extra_headers=extra_headers.items()) if self.env["REQUEST_METHOD"] != "OPTIONS": - if not file_obj: - file_obj = open(file_path, "rb") - + file = open(file_path, "rb") if range_start: - file_obj.seek(range_start) + file.seek(range_start) while 1: try: - block = file_obj.read(block_size) - if is_html_file: - block = self.replaceHtmlVariables(block, path_parts) + block = file.read(block_size) if block: yield block else: raise StopIteration except StopIteration: - file_obj.close() + file.close() break else: # File not exists - for part in self.error404(str(file_path)): - yield part + yield self.error404(file_path) # On websocket connection def actionWebsocket(self): ws = self.env.get("wsgi.websocket") - if ws: - # Allow only same-origin websocket requests - origin = self.env.get("HTTP_ORIGIN") - host = self.env.get("HTTP_HOST") - # Allow only same-origin websocket requests - if origin: - origin_host = origin.split("://", 1)[-1] - if origin_host != host and origin_host not in self.server.allowed_ws_origins: - error_message = "Invalid origin: %s (host: %s, allowed: %s)" % (origin, host, self.server.allowed_ws_origins) - ws.send(json.dumps({"error": error_message})) - return self.error403(error_message) - - # Find site by wrapper_key wrapper_key = self.get["wrapper_key"] + # Find site by wrapper_key site = None - for site_check in list(self.server.sites.values()): + for site_check in self.server.sites.values(): if site_check.settings["wrapper_key"] == wrapper_key: site = site_check if site: # Correct wrapper key - try: - user = self.getCurrentUser() - except Exception as err: - ws.send(json.dumps({"error": "Error in data/user.json: %s" % err})) - return self.error500("Error in data/user.json: %s" % err) + user = self.getCurrentUser() if not user: - ws.send(json.dumps({"error": "No user found"})) - return self.error403("No user found") + self.log.error("No user found") + return self.error403() ui_websocket = UiWebsocket(ws, site, self.server, user, self) site.websockets.append(ui_websocket) # Add to site websockets to allow notify on events - self.server.websockets.append(ui_websocket) ui_websocket.start() - self.server.websockets.remove(ui_websocket) - for site_check in list(self.server.sites.values()): + for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels) if ui_websocket in site_check.websockets: site_check.websockets.remove(ui_websocket) - return [b"Bye."] + return "Bye." else: # No site found by wrapper key - ws.send(json.dumps({"error": "Wrapper key not found: %s" % wrapper_key})) - return self.error403("Wrapper key not found: %s" % wrapper_key) + self.log.error("Wrapper key not found: %s" % wrapper_key) + return self.error403() else: self.start_response("400 Bad Request", []) - return [b"Not a websocket request!"] + return "Not a websocket!" # Debug last error def actionDebug(self): # Raise last error from DebugHook - import main - last_error = main.DebugHook.last_error + import sys + last_error = sys.modules["main"].DebugHook.last_error if last_error: - raise last_error[0](last_error[1]).with_traceback(last_error[2]) + raise last_error[0], last_error[1], last_error[2] else: self.sendHeader() - return [b"No error! :)"] + return "No error! 
:)" # Just raise an error to get console def actionConsole(self): import sys sites = self.server.sites main = sys.modules["main"] - - def bench(code, times=100, init=None): + def bench(code, times=100): sites = self.server.sites main = sys.modules["main"] s = time.time() - if init: - eval(compile(init, '', 'exec'), globals(), locals()) for _ in range(times): back = eval(code, globals(), locals()) return ["%s run: %.3fs" % (times, time.time() - s), back] @@ -918,34 +537,31 @@ class UiRequest(object): # Send bad request error def error400(self, message=""): - self.sendHeader(400, noscript=True) - self.log.error("Error 400: %s" % message) + self.sendHeader(400) return self.formatError("Bad Request", message) # You are not allowed to access this def error403(self, message="", details=True): - self.sendHeader(403, noscript=True) - self.log.warning("Error 403: %s" % message) + self.sendHeader(403) + self.log.debug("Error 403: %s" % message) return self.formatError("Forbidden", message, details=details) # Send file not found error def error404(self, path=""): - self.sendHeader(404, noscript=True) - return self.formatError("Not Found", path, details=False) + self.sendHeader(404) + return self.formatError("Not Found", cgi.escape(path.encode("utf8")), details=False) # Internal server error def error500(self, message=":("): - self.sendHeader(500, noscript=True) - self.log.error("Error 500: %s" % message) - return self.formatError("Server error", message) + self.sendHeader(500) + return self.formatError("Server error", cgi.escape(message)) - @helper.encodeResponse def formatError(self, title, message, details=True): import sys import gevent - if details and config.debug: - details = {key: val for key, val in list(self.env.items()) if hasattr(val, "endswith") and "COOKIE" not in key} + if details: + details = {key: val for key, val in self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key} details["version_zeronet"] = "%s r%s" % (config.version, config.rev) details["version_python"] = sys.version details["version_gevent"] = gevent.__version__ @@ -959,16 +575,22 @@ class UiRequest(object):

            return """
                <h1>%s</h1>
                <h2>%s</h2>
-                <h3>Please report it if you think this an error.</h3>
+                <h3>Please report it if you think this an error.</h3>
                <h4>Details:</h4>
                <pre>%s</pre>
-            """ % (title, html.escape(message), html.escape(json.dumps(details, indent=4, sort_keys=True)))
+            """ % (title, message, json.dumps(details, indent=4, sort_keys=True))
        else:
            return """
                <h1>%s</h1>
                <h2>%s</h2>
    - """ % (title, html.escape(message)) + """ % (title, message) + + +# - Reload for eaiser developing - +# def reload(): + # import imp, sys + # global UiWebsocket + # UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket + # reload(sys.modules["User.UserManager"]) + # UserManager.reloadModule() + # self.user = UserManager.user_manager.getCurrent() diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py index 61943ada..632936fc 100644 --- a/src/Ui/UiServer.py +++ b/src/Ui/UiServer.py @@ -1,21 +1,22 @@ import logging import time -import urllib +import cgi import socket +import sys import gevent from gevent.pywsgi import WSGIServer -from lib.gevent_ws import WebSocketHandler +from gevent.pywsgi import WSGIHandler +from lib.geventwebsocket.handler import WebSocketHandler -from .UiRequest import UiRequest +from UiRequest import UiRequest from Site import SiteManager from Config import config from Debug import Debug -import importlib # Skip websocket handler if not necessary -class UiWSGIHandler(WebSocketHandler): +class UiWSGIHandler(WSGIHandler): def __init__(self, *args, **kwargs): self.server = args[2] @@ -23,25 +24,25 @@ class UiWSGIHandler(WebSocketHandler): self.args = args self.kwargs = kwargs - def handleError(self, err): - if config.debug: # Allow websocket errors to appear on /Debug - import main - main.DebugHook.handleError() - else: - ui_request = UiRequest(self.server, {}, self.environ, self.start_response) - block_gen = ui_request.error500("UiWSGIHandler error: %s" % Debug.formatExceptionMessage(err)) - for block in block_gen: - self.write(block) - def run_application(self): - err_name = "UiWSGIHandler websocket" if "HTTP_UPGRADE" in self.environ else "UiWSGIHandler" - try: - super(UiWSGIHandler, self).run_application() - except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError) as err: - logging.warning("%s connection error: %s" % (err_name, err)) - except Exception as err: - logging.warning("%s error: %s" % (err_name, Debug.formatException(err))) - self.handleError(err) + if "HTTP_UPGRADE" in self.environ: # Websocket request + try: + ws_handler = WebSocketHandler(*self.args, **self.kwargs) + ws_handler.__dict__ = self.__dict__ # Match class variables + ws_handler.run_application() + except Exception, err: + logging.error("UiWSGIHandler websocket error: %s" % Debug.formatException(err)) + if config.debug: # Allow websocket errors to appear on /Debug + import sys + sys.modules["main"].DebugHook.handleError() + else: # Standard HTTP request + try: + super(UiWSGIHandler, self).run_application() + except Exception, err: + logging.error("UiWSGIHandler error: %s" % Debug.formatException(err)) + if config.debug: # Allow websocket errors to appear on /Debug + import sys + sys.modules["main"].DebugHook.handleError() def handle(self): # Save socket to be able to close them properly on exit @@ -51,42 +52,16 @@ class UiWSGIHandler(WebSocketHandler): class UiServer: + def __init__(self): self.ip = config.ui_ip self.port = config.ui_port - self.running = False if self.ip == "*": - self.ip = "0.0.0.0" # Bind all - if config.ui_host: - self.allowed_hosts = set(config.ui_host) - #TODO: For proxies allow sub domains(www) as valid hosts, should be user preference. - elif config.ui_ip == "127.0.0.1": - # IP Addresses are inherently allowed as they are immune to DNS - # rebinding attacks. 
- self.allowed_hosts = set(["zero", "localhost:%s" % config.ui_port]) - # "URI producers and normalizers should omit the port component and - # its ':' delimiter if port is empty or if its value would be the - # same as that of the scheme's default." - # Source: https://tools.ietf.org/html/rfc3986#section-3.2.3 - # As a result, we need to support portless hosts if port 80 is in - # use. - if config.ui_port == 80: - self.allowed_hosts.update(["localhost"]) - else: - self.allowed_hosts = set([]) - self.allowed_ws_origins = set() - self.allow_trans_proxy = config.ui_trans_proxy - + self.ip = "" # Bind all self.wrapper_nonces = [] - self.add_nonces = [] - self.websockets = [] self.site_manager = SiteManager.site_manager self.sites = SiteManager.site_manager.list() self.log = logging.getLogger(__name__) - config.error_logger.onNewRecord = self.handleErrorLogRecord - - def handleErrorLogRecord(self, record): - self.updateWebsocket(log_event=record.levelname) # After WebUI started def afterStarted(self): @@ -95,9 +70,9 @@ class UiServer: # Handle WSGI request def handleRequest(self, env, start_response): - path = bytes(env["PATH_INFO"], "raw-unicode-escape").decode("utf8") + path = env["PATH_INFO"] if env.get("QUERY_STRING"): - get = dict(urllib.parse.parse_qsl(env['QUERY_STRING'])) + get = dict(cgi.parse_qsl(env['QUERY_STRING'])) else: get = {} ui_request = UiRequest(self, get, env, start_response) @@ -106,7 +81,7 @@ class UiServer: else: # Catch and display the error try: return ui_request.route(path) - except Exception as err: + except Exception, err: logging.debug("UiRequest error: %s" % Debug.formatException(err)) return ui_request.error500("Err: %s" % Debug.formatException(err)) @@ -115,93 +90,72 @@ class UiServer: global UiRequest import imp import sys - importlib.reload(sys.modules["User.UserManager"]) - importlib.reload(sys.modules["Ui.UiWebsocket"]) + reload(sys.modules["User.UserManager"]) + reload(sys.modules["Ui.UiWebsocket"]) UiRequest = imp.load_source("UiRequest", "src/Ui/UiRequest.py").UiRequest # UiRequest.reload() # Bind and run the server def start(self): - self.running = True handler = self.handleRequest if config.debug: # Auto reload UiRequest on change from Debug import DebugReloader - DebugReloader.watcher.addCallback(self.reload) + DebugReloader(self.reload) # Werkzeug Debugger try: from werkzeug.debug import DebuggedApplication handler = DebuggedApplication(self.handleRequest, evalex=True) - except Exception as err: + except Exception, err: self.log.info("%s: For debugging please download Werkzeug (http://werkzeug.pocoo.org/)" % err) from Debug import DebugReloader self.log.write = lambda msg: self.log.debug(msg.strip()) # For Wsgi access.log self.log.info("--------------------------------------") - if ":" in config.ui_ip: - self.log.info("Web interface: http://[%s]:%s/" % (config.ui_ip, config.ui_port)) - else: - self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) + self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) self.log.info("--------------------------------------") - if config.open_browser and config.open_browser != "False": + if config.open_browser: logging.info("Opening browser: %s...", config.open_browser) import webbrowser - try: - if config.open_browser == "default_browser": - browser = webbrowser.get() - else: - browser = webbrowser.get(config.open_browser) - url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage) - gevent.spawn_later(0.3, 
browser.open, url, new=2) - except Exception as err: - print("Error starting browser: %s" % err) + if config.open_browser == "default_browser": + browser = webbrowser.get() + else: + browser = webbrowser.get(config.open_browser) + url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage) + gevent.spawn_later(0.3, browser.open, url, new=2) - self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log) + self.server = WSGIServer((self.ip.replace("*", ""), self.port), handler, handler_class=UiWSGIHandler, log=self.log) self.server.sockets = {} self.afterStarted() try: self.server.serve_forever() - except Exception as err: + except Exception, err: self.log.error("Web interface bind error, must be running already, exiting.... %s" % err) - import main - main.file_server.stop() + sys.modules["main"].file_server.stop() self.log.debug("Stopped.") def stop(self): self.log.debug("Stopping...") # Close WS sockets if "clients" in dir(self.server): - for client in list(self.server.clients.values()): + for client in self.server.clients.values(): client.ws.close() # Close http sockets sock_closed = 0 - for sock in list(self.server.sockets.values()): + for sock in self.server.sockets.values(): try: - sock.send(b"bye") + sock.send("bye") sock.shutdown(socket.SHUT_RDWR) # sock._sock.close() # sock.close() sock_closed += 1 - except Exception as err: + except Exception, err: self.log.debug("Http connection close error: %s" % err) self.log.debug("Socket closed: %s" % sock_closed) time.sleep(0.1) - if config.debug: - from Debug import DebugReloader - DebugReloader.watcher.stop() self.server.socket.close() self.server.stop() - self.running = False - time.sleep(1) - - def updateWebsocket(self, **kwargs): - if kwargs: - param = {"event": list(kwargs.items())[0]} - else: - param = None - - for ws in self.websockets: - ws.event("serverChanged", param) + time.sleep(1) \ No newline at end of file diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 2f982e1d..35b8ce1c 100644 --- a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -1,30 +1,24 @@ import json import time import sys +import hashlib import os import shutil import re -import copy -import logging -import stat import gevent from Config import config from Site import SiteManager -from Crypt import CryptBitcoin from Debug import Debug from util import QueryJson, RateLimit from Plugin import PluginManager from Translate import translate as _ -from util import helper -from util import SafeRe -from util.Flag import flag -from Content.ContentManager import VerifyError, SignError @PluginManager.acceptPlugins class UiWebsocket(object): + def __init__(self, ws, site, server, user, request): self.ws = ws self.site = site @@ -36,8 +30,14 @@ class UiWebsocket(object): self.next_message_id = 1 self.waiting_cb = {} # Waiting for callback. 
Key: message_id, Value: function pointer self.channels = [] # Channels joined to - self.state = {"sending": False} # Shared state of websocket connection + self.sending = False # Currently sending to client self.send_queue = [] # Messages to send to client + self.admin_commands = ( + "sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone", + "channelJoinAllsite", "serverUpdate", "serverPortcheck", "serverShutdown", "certSet", "configSet", + "actionPermissionAdd", "actionPermissionRemove" + ) + self.async_commands = ("fileGet", "fileList", "dirList") # Start listener loop def start(self): @@ -45,110 +45,95 @@ class UiWebsocket(object): if self.site.address == config.homepage and not self.site.page_requested: # Add open fileserver port message or closed port error to homepage at first request after start self.site.page_requested = True # Dont add connection notification anymore - import main - file_server = main.file_server - if not file_server.port_opened or file_server.tor_manager.start_onions is None: + file_server = sys.modules["main"].file_server + if file_server.port_opened is None or file_server.tor_manager.start_onions is None: self.site.page_requested = False # Not ready yet, check next time + elif file_server.port_opened is True: + self.site.notifications.append([ + "done", + _["Congratulation, your port {0} is opened.
<br>You are full member of ZeroNet network!"].format(config.fileserver_port), + 10000 + ]) + elif config.tor == "always" and file_server.tor_manager.start_onions: + self.site.notifications.append([ + "done", + _(u""" + {_[Tor mode active, every connection using Onion route.]}<br>
+ {_[Successfully started Tor onion hidden services.]} + """), + 10000 + ]) + elif config.tor == "always" and file_server.tor_manager.start_onions is not False: + self.site.notifications.append([ + "error", + _(u""" + {_[Tor mode active, every connection using Onion route.]}<br>
+ {_[Unable to start hidden services, please check your config.]} + """), + 0 + ]) + elif file_server.port_opened is False and file_server.tor_manager.start_onions: + self.site.notifications.append([ + "done", + _(u""" + {_[Successfully started Tor onion hidden services.]}<br>
+ {_[For faster connections open {0} port on your router.]} + """).format(config.fileserver_port), + 10000 + ]) else: - try: - self.addHomepageNotifications() - except Exception as err: - self.log.error("Uncaught Exception: " + Debug.formatException(err)) + self.site.notifications.append([ + "error", + _(u""" + {_[Your connection is restricted. Please, open {0} port on your router]}<br>
    + {_[or configure Tor to become full member of ZeroNet network.]} + """).format(config.fileserver_port), + 0 + ]) for notification in self.site.notifications: # Send pending notification messages - # send via WebSocket self.cmd("notification", notification) - # just in case, log them to terminal - if notification[0] == "error": - self.log.error("\n*** %s\n" % self.dedent(notification[1])) - self.site.notifications = [] - while True: try: - if ws.closed: - break - else: - message = ws.receive() - except Exception as err: - self.log.error("WebSocket receive error: %s" % Debug.formatException(err)) - break + message = ws.receive() + except Exception, err: + return "Bye." # Close connection if message: try: - req = json.loads(message) - self.handleRequest(req) - except Exception as err: + self.handleRequest(message) + except Exception, err: if config.debug: # Allow websocket errors to appear on /Debug - import main - main.DebugHook.handleError() - self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message)) - if not self.hasPlugin("Multiuser"): - self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) - - self.onClosed() - - def onClosed(self): - pass - - def dedent(self, text): - return re.sub("[\\r\\n\\x20\\t]+", " ", text.strip().replace("
    ", " ")) - - def addHomepageNotifications(self): - if not(self.hasPlugin("Multiuser")) and not(self.hasPlugin("UiPassword")): - bind_ip = getattr(config, "ui_ip", "") - whitelist = getattr(config, "ui_restrict", []) - # binds to the Internet, no IP whitelist, no UiPassword, no Multiuser - if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist): - self.site.notifications.append([ - "error", - _("You are not going to set up a public gateway. However, your Web UI is
    " + - "open to the whole Internet.
    " + - "Please check your configuration.") - ]) - - def hasPlugin(self, name): - return name in PluginManager.plugin_manager.plugin_names + sys.modules["main"].DebugHook.handleError() + self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) + self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) # Has permission to run the command def hasCmdPermission(self, cmd): - flags = flag.db.get(self.getCmdFuncName(cmd), ()) - if "admin" in flags and "ADMIN" not in self.permissions: + cmd = cmd[0].lower() + cmd[1:] + + if cmd in self.admin_commands and "ADMIN" not in self.permissions: return False else: return True # Has permission to access a site - def hasSitePermission(self, address, cmd=None): + def hasSitePermission(self, address): if address != self.site.address and "ADMIN" not in self.site.settings["permissions"]: return False else: return True - def hasFilePermission(self, inner_path): - valid_signers = self.site.content_manager.getValidSigners(inner_path) - return self.site.settings["own"] or self.user.getAuthAddress(self.site.address) in valid_signers - # Event in a channel def event(self, channel, *params): if channel in self.channels: # We are joined to channel if channel == "siteChanged": - site = params[0] - site_info = self.formatSiteInfo(site, create_user=False) + site = params[0] # Triggerer site + site_info = self.formatSiteInfo(site) if len(params) > 1 and params[1]: # Extra data site_info.update(params[1]) self.cmd("setSiteInfo", site_info) - elif channel == "serverChanged": - server_info = self.formatServerInfo() - if len(params) > 0 and params[0]: # Extra data - server_info.update(params[0]) - self.cmd("setServerInfo", server_info) - elif channel == "announcerChanged": - site = params[0] - announcer_info = self.formatAnnouncerInfo(site) - if len(params) > 1 and params[1]: # Extra data - announcer_info.update(params[1]) - self.cmd("setAnnouncerInfo", announcer_info) # Send response to client (to = message.id) def response(self, to, result): @@ -164,18 +149,17 @@ class UiWebsocket(object): self.next_message_id += 1 if cb: # Callback after client responded self.waiting_cb[message["id"]] = cb - self.send_queue.append(message) - if self.state["sending"]: + if self.sending: return # Already sending + self.send_queue.append(message) try: while self.send_queue: - self.state["sending"] = True + self.sending = True message = self.send_queue.pop(0) self.ws.send(json.dumps(message)) - self.state["sending"] = False - except Exception as err: + self.sending = False + except Exception, err: self.log.debug("Websocket send error: %s" % Debug.formatException(err)) - self.state["sending"] = False def getPermissions(self, req_id): permissions = self.site.settings["permissions"] @@ -187,13 +171,10 @@ class UiWebsocket(object): def asyncWrapper(self, func): def asyncErrorWatcher(func, *args, **kwargs): try: - result = func(*args, **kwargs) - if result is not None: - self.response(args[0], result) - except Exception as err: + func(*args, **kwargs) + except Exception, err: if config.debug: # Allow websocket errors to appear on /Debug - import main - main.DebugHook.handleError() + sys.modules["main"].DebugHook.handleError() self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) @@ -201,12 +182,9 @@ class UiWebsocket(object): gevent.spawn(asyncErrorWatcher, func, *args, **kwargs) return wrapper - def getCmdFuncName(self, cmd): - func_name = "action" + 
cmd[0].upper() + cmd[1:] - return func_name - # Handle incoming messages - def handleRequest(self, req): + def handleRequest(self, data): + req = json.loads(data) cmd = req.get("cmd") params = req.get("params") @@ -214,39 +192,32 @@ class UiWebsocket(object): if cmd == "response": # It's a response to a command return self.actionResponse(req["to"], req["result"]) + elif not self.hasCmdPermission(cmd): # Admin commands + return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) else: # Normal command - func_name = self.getCmdFuncName(cmd) + func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) - if self.site.settings.get("deleting"): - return self.response(req["id"], {"error": "Site is deleting"}) - if not func: # Unknown command - return self.response(req["id"], {"error": "Unknown command: %s" % cmd}) - - if not self.hasCmdPermission(cmd): # Admin commands - return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) + self.response(req["id"], {"error": "Unknown command: %s" % cmd}) + return # Execute in parallel - func_flags = flag.db.get(self.getCmdFuncName(cmd), ()) - if func_flags and "async_run" in func_flags: + if cmd in self.async_commands: func = self.asyncWrapper(func) # Support calling as named, unnamed parameters and raw first argument too if type(params) is dict: - result = func(req["id"], **params) + func(req["id"], **params) elif type(params) is list: - result = func(req["id"], *params) + func(req["id"], *params) elif params: - result = func(req["id"], params) + func(req["id"], params) else: - result = func(req["id"]) - - if result is not None: - self.response(req["id"], result) + func(req["id"]) # Format site info def formatSiteInfo(self, site, create_user=True): - content = site.content_manager.contents.get("content.json", {}) + content = site.content_manager.contents.get("content.json") if content: # Remove unnecessary data transfer content = content.copy() content["files"] = len(content.get("files", {})) @@ -261,13 +232,14 @@ class UiWebsocket(object): settings = site.settings.copy() del settings["wrapper_key"] # Dont expose wrapper key + del settings["auth_key"] # Dont send auth key twice ret = { + "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed + "auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], # Obsolete, will be removed "auth_address": self.user.getAuthAddress(site.address, create=create_user), "cert_user_id": self.user.getCertUserId(site.address), "address": site.address, - "address_short": site.address_short, - "address_hash": site.address_hash.hex(), "settings": settings, "content_updated": site.content_updated, "bad_files": len(site.bad_files), @@ -281,63 +253,29 @@ class UiWebsocket(object): } if site.settings["own"]: ret["privatekey"] = bool(self.user.getSiteData(site.address, create=create_user).get("privatekey")) - if site.isServing() and content: + if site.settings["serving"] and content: ret["peers"] += 1 # Add myself if serving return ret def formatServerInfo(self): - import main - file_server = main.file_server - if file_server.port_opened == {}: - ip_external = None - else: - ip_external = any(file_server.port_opened.values()) - back = { - "ip_external": ip_external, - "port_opened": file_server.port_opened, + return { + "ip_external": sys.modules["main"].file_server.port_opened, "platform": sys.platform, "fileserver_ip": config.fileserver_ip, "fileserver_port": config.fileserver_port, - "tor_enabled": 
file_server.tor_manager.enabled, - "tor_status": file_server.tor_manager.status, - "tor_has_meek_bridges": file_server.tor_manager.has_meek_bridges, - "tor_use_bridges": config.tor_use_bridges, + "tor_enabled": sys.modules["main"].file_server.tor_manager.enabled, + "tor_status": sys.modules["main"].file_server.tor_manager.status, "ui_ip": config.ui_ip, "ui_port": config.ui_port, "version": config.version, "rev": config.rev, - "timecorrection": file_server.timecorrection, "language": config.language, "debug": config.debug, - "offline": config.offline, - "plugins": PluginManager.plugin_manager.plugin_names, - "plugins_rev": PluginManager.plugin_manager.plugins_rev, - "user_settings": self.user.settings + "plugins": PluginManager.plugin_manager.plugin_names } - if "ADMIN" in self.site.settings["permissions"]: - back["updatesite"] = config.updatesite - back["dist_type"] = config.dist_type - back["lib_verify_best"] = CryptBitcoin.lib_verify_best - return back - - def formatAnnouncerInfo(self, site): - return {"address": site.address, "stats": site.announcer.stats} # - Actions - - def actionAs(self, to, address, cmd, params=[]): - if not self.hasSitePermission(address, cmd=cmd): - #TODO! Return this as error ? - return self.response(to, "No permission for site %s" % address) - if not self.server.sites.get(address): - return self.response(to, {"error": "Site Does Not Exist: %s" % address}) - req_self = copy.copy(self) - req_self.site = self.server.sites.get(address) - req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site - req_obj = super(UiWebsocket, req_self) - req = {"id": to, "cmd": cmd, "params": params} - req_obj.handleRequest(req) - # Do callback on response {"cmd": "response", "to": message_id, "result": result} def actionResponse(self, to, result): if to in self.waiting_cb: @@ -357,56 +295,18 @@ class UiWebsocket(object): ret["event"] = ("file_done", file_status) self.response(to, ret) - def actionSiteBadFiles(self, to): - return list(self.site.bad_files.keys()) - # Join to an event channel - def actionChannelJoin(self, to, channels): - if type(channels) != list: - channels = [channels] - - for channel in channels: - if channel not in self.channels: - self.channels.append(channel) - - self.response(to, "ok") + def actionChannelJoin(self, to, channel): + if channel not in self.channels: + self.channels.append(channel) # Server variables def actionServerInfo(self, to): - back = self.formatServerInfo() - self.response(to, back) - - # Create a new wrapper nonce that allows to load html file - @flag.admin - def actionServerGetWrapperNonce(self, to): - wrapper_nonce = self.request.getWrapperNonce() - self.response(to, wrapper_nonce) - - def actionAnnouncerInfo(self, to): - back = self.formatAnnouncerInfo(self.site) - self.response(to, back) - - @flag.admin - def actionAnnouncerStats(self, to): - back = {} - trackers = self.site.announcer.getTrackers() - for site in list(self.server.sites.values()): - for tracker, stats in site.announcer.stats.items(): - if tracker not in trackers: - continue - if tracker not in back: - back[tracker] = {} - is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"]) - for key, val in stats.items(): - if key.startswith("num_"): - back[tracker][key] = back[tracker].get(key, 0) + val - elif is_latest_data: - back[tracker][key] = val - - return back + ret = self.formatServerInfo() + self.response(to, ret) # Sign content.json - def actionSiteSign(self, to, privatekey=None, 
inner_path="content.json", remove_missing_optional=False, update_changed_files=False, response_ok=True): + def actionSiteSign(self, to, privatekey=None, inner_path="content.json", response_ok=True, update_changed_files=False, remove_missing_optional=False): self.log.debug("Signing: %s" % inner_path) site = self.site extend = {} # Extended info for signing @@ -419,30 +319,21 @@ class UiWebsocket(object): inner_path = file_info["content_inner_path"] # Add certificate to user files - is_user_content = file_info and ("cert_signers" in file_info or "cert_signers_pattern" in file_info) - if is_user_content and privatekey is None: + if file_info and "cert_signers" in file_info and privatekey is None: cert = self.user.getCert(self.site.address) - if not cert: - error = "Site sign failed: No certificate selected for Site: %s, Hence Signing inner_path: %s Failed, Try Adding/Selecting User Cert via Site Login" % (self.site.address, inner_path) - self.log.error(error) - return self.response(to, {"error": error}) - else: - extend["cert_auth_type"] = cert["auth_type"] - extend["cert_user_id"] = self.user.getCertUserId(site.address) - extend["cert_sign"] = cert["cert_sign"] - self.log.debug("Extending content.json with cert %s" % extend["cert_user_id"]) + extend["cert_auth_type"] = cert["auth_type"] + extend["cert_user_id"] = self.user.getCertUserId(site.address) + extend["cert_sign"] = cert["cert_sign"] - if not self.hasFilePermission(inner_path): + if ( + not site.settings["own"] and + self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path) + ): self.log.error("SiteSign error: you don't own this site & site owner doesn't allow you to do so.") return self.response(to, {"error": "Forbidden, you can only modify your own sites"}) if privatekey == "stored": # Get privatekey from sites.json privatekey = self.user.getSiteData(self.site.address).get("privatekey") - if not privatekey: - self.cmd("notification", ["error", _["Content signing failed"] + "
    Private key not found in sites.json "]) - self.response(to, {"error": "Site sign failed: Private key not stored."}) - self.log.error("Site sign failed: %s: Private key not stored in sites.json" % inner_path) - return if not privatekey: # Get privatekey from users.json auth_address privatekey = self.user.getAuthPrivatekey(self.site.address) @@ -450,17 +341,10 @@ class UiWebsocket(object): # Reload content.json, ignore errors to make it up-to-date site.content_manager.loadContent(inner_path, add_bad_files=False, force=True) # Sign using private key sent by user - try: - site.content_manager.sign(inner_path, privatekey, extend=extend, update_changed_files=update_changed_files, remove_missing_optional=remove_missing_optional) - except (VerifyError, SignError) as err: - self.cmd("notification", ["error", _["Content signing failed"] + "
    %s" % err]) - self.response(to, {"error": "Site sign failed: %s" % err}) - self.log.error("Site sign failed: %s: %s" % (inner_path, Debug.formatException(err))) - return - except Exception as err: - self.cmd("notification", ["error", _["Content signing error"] + "
    %s" % Debug.formatException(err)]) - self.response(to, {"error": "Site sign error: %s" % Debug.formatException(err)}) - self.log.error("Site sign error: %s: %s" % (inner_path, Debug.formatException(err))) + signed = site.content_manager.sign(inner_path, privatekey, extend=extend, update_changed_files=update_changed_files, remove_missing_optional=remove_missing_optional) + if not signed: + self.cmd("notification", ["error", _["Content signing failed"]]) + self.response(to, {"error": "Site sign failed"}) return site.content_manager.loadContent(inner_path, add_bad_files=False) # Load new content.json, ignore errors @@ -470,16 +354,13 @@ class UiWebsocket(object): if response_ok: self.response(to, "ok") - else: - return inner_path + + return inner_path # Sign and publish content.json - def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True, remove_missing_optional=False, update_changed_files=False): + def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True): if sign: - inner_path = self.actionSiteSign( - to, privatekey, inner_path, response_ok=False, - remove_missing_optional=remove_missing_optional, update_changed_files=update_changed_files - ) + inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False) if not inner_path: return # Publishing @@ -488,9 +369,6 @@ class UiWebsocket(object): self.site.saveSettings() self.site.announce() - if inner_path not in self.site.content_manager.contents: - return self.response(to, {"error": "File %s not found" % inner_path}) - event_name = "publish %s %s" % (self.site.address, inner_path) called_instantly = RateLimit.isAllowed(event_name, 30) thread = RateLimit.callAsync(event_name, 30, self.doSitePublish, self.site, inner_path) # Only publish once in 30 seconds @@ -518,7 +396,7 @@ class UiWebsocket(object): progress ]) diffs = site.content_manager.getDiffs(inner_path) - back = site.publish(limit=10, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) + back = site.publish(limit=5, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) if back == 0: # Failed to publish to anyone self.cmd("progress", ["publish", _["Content publish failed."], -100]) else: @@ -536,8 +414,7 @@ class UiWebsocket(object): self.response(to, "ok") else: if len(site.peers) == 0: - import main - if any(main.file_server.port_opened.values()) or main.file_server.tor_manager.start_onions: + if sys.modules["main"].file_server.port_opened or sys.modules["main"].file_server.tor_manager.start_onions: if notification: self.cmd("notification", ["info", _["No peers found, but your content is ready to access."]]) if callback: @@ -546,7 +423,7 @@ class UiWebsocket(object): if notification: self.cmd("notification", [ "info", - _("""{_[Your network connection is restricted. Please, open {0} port]}
    + _(u"""{_[Your network connection is restricted. Please, open {0} port]}
    {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port) ]) if callback: @@ -556,17 +433,11 @@ class UiWebsocket(object): if notification: self.response(to, {"error": "Content publish failed."}) - def actionSiteReload(self, to, inner_path): - self.site.content_manager.loadContent(inner_path, add_bad_files=False) - self.site.storage.verifyFiles(quick_check=True) - self.site.updateWebsocket() - return "ok" - # Write a file to disk def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False): valid_signers = self.site.content_manager.getValidSigners(inner_path) auth_address = self.user.getAuthAddress(self.site.address) - if not self.hasFilePermission(inner_path): + if not self.site.settings["own"] and auth_address not in valid_signers: self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers)) return self.response(to, {"error": "Forbidden, you can only modify your own files"}) @@ -578,7 +449,7 @@ class UiWebsocket(object): self.cmd( "confirm", [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]], - lambda res: self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) + lambda (res): self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) ) return False @@ -599,7 +470,7 @@ class UiWebsocket(object): shutil.copyfileobj(f_old, f_new) self.site.storage.write(inner_path, content) - except Exception as err: + except Exception, err: self.log.error("File write error: %s" % Debug.formatException(err)) return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)}) @@ -614,14 +485,15 @@ class UiWebsocket(object): ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]}) def actionFileDelete(self, to, inner_path): - if not self.hasFilePermission(inner_path): + if ( + not self.site.settings["own"] and + self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path) + ): self.log.error("File delete error: you don't own this site & you are not approved by the owner.") return self.response(to, {"error": "Forbidden, you can only modify your own files"}) - need_delete = True file_info = self.site.content_manager.getFileInfo(inner_path) - if file_info and file_info.get("optional"): - # Non-existing optional files won't be removed from content.json, so we have to do it manually + if file_info.get("optional"): self.log.debug("Deleting optional file: %s" % inner_path) relative_path = file_info["relative_path"] content_json = self.site.storage.loadJson(file_info["content_inner_path"]) @@ -629,14 +501,12 @@ class UiWebsocket(object): del content_json["files_optional"][relative_path] self.site.storage.writeJson(file_info["content_inner_path"], content_json) self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) - need_delete = self.site.storage.isFile(inner_path) # File sill exists after removing from content.json (owned site) - if need_delete: - try: - self.site.storage.delete(inner_path) - except Exception as err: - self.log.error("File delete error: %s" % err) - return self.response(to, {"error": "Delete error: %s" % Debug.formatExceptionMessage(err)}) + try: + self.site.storage.delete(inner_path) + except Exception, err: + self.log.error("File delete error: Exception - %s" % err) + return self.response(to, {"error": "Delete error: %s" % err}) self.response(to, "ok") @@ -646,40 +516,20 
@@ class UiWebsocket(object): ws.event("siteChanged", self.site, {"event": ["file_deleted", inner_path]}) # Find data in json files - def actionFileQuery(self, to, dir_inner_path, query=None): + def actionFileQuery(self, to, dir_inner_path, query): # s = time.time() dir_path = self.site.storage.getPath(dir_inner_path) - rows = list(QueryJson.query(dir_path, query or "")) + rows = list(QueryJson.query(dir_path, query)) # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s)) return self.response(to, rows) # List files in directory - @flag.async_run def actionFileList(self, to, inner_path): - try: - return list(self.site.storage.walk(inner_path)) - except Exception as err: - self.log.error("fileList %s error: %s" % (inner_path, Debug.formatException(err))) - return {"error": Debug.formatExceptionMessage(err)} + return self.response(to, list(self.site.storage.walk(inner_path))) # List directories in a directory - @flag.async_run - def actionDirList(self, to, inner_path, stats=False): - try: - if stats: - back = [] - for file_name in self.site.storage.list(inner_path): - file_stats = os.stat(self.site.storage.getPath(inner_path + "/" + file_name)) - is_dir = stat.S_ISDIR(file_stats.st_mode) - back.append( - {"name": file_name, "size": file_stats.st_size, "is_dir": is_dir} - ) - return back - else: - return list(self.site.storage.list(inner_path)) - except Exception as err: - self.log.error("dirList %s error: %s" % (inner_path, Debug.formatException(err))) - return {"error": Debug.formatExceptionMessage(err)} + def actionDirList(self, to, inner_path): + return self.response(to, list(self.site.storage.list(inner_path))) # Sql query def actionDbQuery(self, to, query, params=None, wait_for=None): @@ -687,10 +537,12 @@ class UiWebsocket(object): s = time.time() rows = [] try: + if not query.strip().upper().startswith("SELECT"): + raise Exception("Only SELECT query supported") res = self.site.storage.query(query, params) - except Exception as err: # Response the error to client - self.log.error("DbQuery error: %s" % Debug.formatException(err)) - return self.response(to, {"error": Debug.formatExceptionMessage(err)}) + except Exception, err: # Response the error to client + self.log.error("DbQuery error: %s" % err) + return self.response(to, {"error": str(err)}) # Convert result to dict for row in res: rows.append(dict(row)) @@ -699,55 +551,26 @@ class UiWebsocket(object): return self.response(to, rows) # Return file content - @flag.async_run - def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300, priority=6): + def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300): try: if required or inner_path in self.site.bad_files: with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=priority) - body = self.site.storage.read(inner_path, "rb") - except (Exception, gevent.Timeout) as err: - self.log.debug("%s fileGet error: %s" % (inner_path, Debug.formatException(err))) + self.site.needFile(inner_path, priority=6) + body = self.site.storage.read(inner_path) + except Exception, err: + self.log.error("%s fileGet error: %s" % (inner_path, err)) body = None - - if not body: - body = None - elif format == "base64": + if body and format == "base64": import base64 - body = base64.b64encode(body).decode() - else: - try: - body = body.decode() - except Exception as err: - self.response(to, {"error": "Error decoding text: %s" % err}) - self.response(to, body) + body = base64.b64encode(body) + return self.response(to, 
body) - @flag.async_run - def actionFileNeed(self, to, inner_path, timeout=300, priority=6): - try: - with gevent.Timeout(timeout): - self.site.needFile(inner_path, priority=priority) - except (Exception, gevent.Timeout) as err: - return self.response(to, {"error": Debug.formatExceptionMessage(err)}) - return self.response(to, "ok") - - def actionFileRules(self, to, inner_path, use_my_cert=False, content=None): - if not content: # No content defined by function call - content = self.site.content_manager.contents.get(inner_path) - - if not content: # File not created yet - cert = self.user.getCert(self.site.address) - if cert and cert["auth_address"] in self.site.content_manager.getValidSigners(inner_path): - # Current selected cert if valid for this site, add it to query rules - content = {} - content["cert_auth_type"] = cert["auth_type"] - content["cert_user_id"] = self.user.getCertUserId(self.site.address) - content["cert_sign"] = cert["cert_sign"] - - rules = self.site.content_manager.getRules(inner_path, content) + def actionFileRules(self, to, inner_path): + rules = self.site.content_manager.getRules(inner_path) if inner_path.endswith("content.json") and rules: + content = self.site.content_manager.contents.get(inner_path) if content: - rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in list(content.get("files", {}).values())]) + rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) else: rules["current_size"] = 0 return self.response(to, rules) @@ -771,12 +594,12 @@ class UiWebsocket(object): self.cmd( "confirm", [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")], - lambda res: self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert) + lambda (res): self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert) ) else: self.response(to, "Not changed") - except Exception as err: - self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err))) + except Exception, err: + self.log.error("CertAdd error: Exception - %s" % err.message) self.response(to, {"error": err.message}) def cbCertAddConfirm(self, to, domain, auth_type, auth_user_name, cert): @@ -791,24 +614,19 @@ class UiWebsocket(object): self.response(to, "ok") # Select certificate for site - def actionCertSelect(self, to, accepted_domains=[], accept_any=False, accepted_pattern=None): + def actionCertSelect(self, to, accepted_domains=[], accept_any=False): accounts = [] - accounts.append(["", _["No certificate"], ""]) # Default option + accounts.append(["", _["Unique to site"], ""]) # Default option active = "" # Make it active if no other option found # Add my certs auth_address = self.user.getAuthAddress(self.site.address) # Current auth address site_data = self.user.getSiteData(self.site.address) # Current auth address - - if not accepted_domains and not accepted_pattern: # Accept any if no filter defined - accept_any = True - - for domain, cert in list(self.user.certs.items()): + for domain, cert in self.user.certs.items(): if auth_address == cert["auth_address"] and domain == site_data.get("cert"): active = domain title = cert["auth_user_name"] + "@" + domain - accepted_pattern_match = accepted_pattern and SafeRe.match(accepted_pattern, domain) - if domain in accepted_domains or accept_any or accepted_pattern_match: + if domain in accepted_domains or not accepted_domains or accept_any: accounts.append([domain, title, ""]) else: accounts.append([domain, title, "disabled"]) @@ -819,7 
+637,7 @@ class UiWebsocket(object): for domain, account, css_class in accounts: if domain == active: css_class += " active" # Currently selected option - title = _("%s ({_[currently selected]})") % account + title = _(u"%s ({_[currently selected]})") % account else: title = "%s" % account body += "%s" % (css_class, domain, title) @@ -829,27 +647,28 @@ class UiWebsocket(object): # body+= "Accepted authorization providers by the site:" body += "
    " for domain in more_domains: - body += _(""" - + body += _(u""" + {_[Register]} »{domain} """) body += "
    " - script = """ + body += """ + + """ - self.cmd("notification", ["ask", body], lambda domain: self.actionCertSet(to, domain)) - self.cmd("injectScript", script) + # Send the notification + self.cmd("notification", ["ask", body]) # - Admin actions - - @flag.admin def actionPermissionAdd(self, to, permission): if permission not in self.site.settings["permissions"]: self.site.settings["permissions"].append(permission) @@ -857,86 +676,52 @@ class UiWebsocket(object): self.site.updateWebsocket(permission_added=permission) self.response(to, "ok") - @flag.admin def actionPermissionRemove(self, to, permission): self.site.settings["permissions"].remove(permission) self.site.saveSettings() self.site.updateWebsocket(permission_removed=permission) self.response(to, "ok") - @flag.admin - def actionPermissionDetails(self, to, permission): - if permission == "ADMIN": - self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") - elif permission == "NOSANDBOX": - self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") - elif permission == "PushNotification": - self.response(to, _["Send notifications"]) - else: - self.response(to, "") - # Set certificate that used for authenticate user for site - @flag.admin def actionCertSet(self, to, domain): self.user.setCert(self.site.address, domain) self.site.updateWebsocket(cert_changed=domain) - self.response(to, "ok") - - # List user's certificates - @flag.admin - def actionCertList(self, to): - back = [] - auth_address = self.user.getAuthAddress(self.site.address) - for domain, cert in list(self.user.certs.items()): - back.append({ - "auth_address": cert["auth_address"], - "auth_type": cert["auth_type"], - "auth_user_name": cert["auth_user_name"], - "domain": domain, - "selected": cert["auth_address"] == auth_address - }) - return back # List all site info - @flag.admin - def actionSiteList(self, to, connecting_sites=False): + def actionSiteList(self, to): ret = [] - for site in list(self.server.sites.values()): - if not site.content_manager.contents.get("content.json") and not connecting_sites: - continue # Incomplete site + SiteManager.site_manager.load() # Reload sites + for site in self.server.sites.values(): + if not site.content_manager.contents.get("content.json"): + continue # Broken site ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing self.response(to, ret) # Join to an event channel on all sites - @flag.admin def actionChannelJoinAllsite(self, to, channel): if channel not in self.channels: # Add channel to channels self.channels.append(channel) - for site in list(self.server.sites.values()): # Add websocket to every channel + for site in self.server.sites.values(): # Add websocket to every channel if self not in site.websockets: site.websockets.append(self) - self.response(to, "ok") - # Update site content.json - def actionSiteUpdate(self, to, address, check_files=False, since=None, announce=False): + def actionSiteUpdate(self, to, address, check_files=False, since=None): def updateThread(): - site.update(announce=announce, check_files=check_files, since=since) + site.update(check_files=check_files, since=since) self.response(to, "Updated") site = self.server.sites.get(address) + if not site.settings["serving"]: + site.settings["serving"] = True + site.saveSettings() if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]): - if not 
site.settings["serving"]: - site.settings["serving"] = True - site.saveSettings() - gevent.spawn(updateThread) else: self.response(to, {"error": "Unknown site: %s" % address}) # Pause site serving - @flag.admin def actionSitePause(self, to, address): site = self.server.sites.get(address) if site: @@ -949,7 +734,6 @@ class UiWebsocket(object): self.response(to, {"error": "Unknown site: %s" % address}) # Resume site serving - @flag.admin def actionSiteResume(self, to, address): site = self.server.sites.get(address) if site: @@ -962,8 +746,6 @@ class UiWebsocket(object): else: self.response(to, {"error": "Unknown site: %s" % address}) - @flag.admin - @flag.no_multiuser def actionSiteDelete(self, to, address): site = self.server.sites.get(address) if site: @@ -975,304 +757,56 @@ class UiWebsocket(object): else: self.response(to, {"error": "Unknown site: %s" % address}) - def cbSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): + def actionSiteClone(self, to, address, root_inner_path=""): self.cmd("notification", ["info", _["Cloning site..."]]) site = self.server.sites.get(address) - response = {} - if target_address: - target_site = self.server.sites.get(target_address) - privatekey = self.user.getSiteData(target_site.address).get("privatekey") - site.clone(target_address, privatekey, root_inner_path=root_inner_path) - self.cmd("notification", ["done", _["Site source code upgraded!"]]) - site.publish() - response = {"address": target_address} - else: - # Generate a new site from user's bip32 seed - new_address, new_address_index, new_site_data = self.user.getNewSiteData() - new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index, root_inner_path=root_inner_path) - new_site.settings["own"] = True - new_site.saveSettings() - self.cmd("notification", ["done", _["Site cloned"]]) - if redirect: - self.cmd("redirect", "/%s" % new_address) - gevent.spawn(new_site.announce) - response = {"address": new_address} - self.response(to, response) - return "ok" + # Generate a new site from user's bip32 seed + new_address, new_address_index, new_site_data = self.user.getNewSiteData() + new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index, root_inner_path=root_inner_path) + new_site.settings["own"] = True + new_site.saveSettings() + self.cmd("notification", ["done", _["Site cloned"] + "" % new_address]) + gevent.spawn(new_site.announce) - @flag.no_multiuser - def actionSiteClone(self, to, address, root_inner_path="", target_address=None, redirect=True): - if not SiteManager.site_manager.isAddress(address): - self.response(to, {"error": "Not a site: %s" % address}) - return - - if not self.server.sites.get(address): - # Don't expose site existence - return - - site = self.server.sites.get(address) - if site.bad_files: - for bad_inner_path in list(site.bad_files.keys()): - is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path) - if not is_user_file and bad_inner_path != "content.json": - self.cmd("notification", ["error", _["Clone error: Site still in sync"]]) - return {"error": "Site still in sync"} - - if "ADMIN" in self.getPermissions(to): - self.cbSiteClone(to, address, root_inner_path, target_address, redirect) - else: - self.cmd( - "confirm", - [_["Clone site %s?"] % address, _["Clone"]], - lambda res: self.cbSiteClone(to, address, root_inner_path, target_address, redirect) - ) - - @flag.admin - @flag.no_multiuser def actionSiteSetLimit(self, to, size_limit): 
self.site.settings["size_limit"] = int(size_limit) self.site.saveSettings() self.response(to, "ok") - self.site.updateWebsocket() self.site.download(blind_includes=True) - @flag.admin - def actionSiteAdd(self, to, address): - site_manager = SiteManager.site_manager - if address in site_manager.sites: - return {"error": "Site already added"} - else: - if site_manager.need(address): - return "ok" - else: - return {"error": "Invalid address"} - - @flag.async_run - def actionSiteListModifiedFiles(self, to, content_inner_path="content.json"): - content = self.site.content_manager.contents.get(content_inner_path) - if not content: - return {"error": "content file not avaliable"} - - min_mtime = content.get("modified", 0) - site_path = self.site.storage.directory - modified_files = [] - - # Load cache if not signed since last modified check - if content.get("modified", 0) < self.site.settings["cache"].get("time_modified_files_check", 0): - min_mtime = self.site.settings["cache"].get("time_modified_files_check") - modified_files = self.site.settings["cache"].get("modified_files", []) - - inner_paths = [content_inner_path] + list(content.get("includes", {}).keys()) + list(content.get("files", {}).keys()) - - if len(inner_paths) > 100: - return {"error": "Too many files in content.json"} - - for relative_inner_path in inner_paths: - inner_path = helper.getDirname(content_inner_path) + relative_inner_path - try: - is_mtime_newer = os.path.getmtime(self.site.storage.getPath(inner_path)) > min_mtime + 1 - if is_mtime_newer: - if inner_path.endswith("content.json"): - is_modified = self.site.content_manager.isModified(inner_path) - else: - previous_size = content["files"][inner_path]["size"] - is_same_size = self.site.storage.getSize(inner_path) == previous_size - ext = inner_path.rsplit(".", 1)[-1] - is_text_file = ext in ["json", "txt", "html", "js", "css"] - if is_same_size: - if is_text_file: - is_modified = self.site.content_manager.isModified(inner_path) # Check sha512 hash - else: - is_modified = False - else: - is_modified = True - - # Check ran, modified back to original value, but in the cache - if not is_modified and inner_path in modified_files: - modified_files.remove(inner_path) - else: - is_modified = False - except Exception as err: - if not self.site.storage.isFile(inner_path): # File deleted - is_modified = True - else: - raise err - if is_modified and inner_path not in modified_files: - modified_files.append(inner_path) - - self.site.settings["cache"]["time_modified_files_check"] = time.time() - self.site.settings["cache"]["modified_files"] = modified_files - return {"modified_files": modified_files} - - @flag.admin - def actionSiteSetSettingsValue(self, to, key, value): - if key not in ["modified_files_notification"]: - return {"error": "Can't change this key"} - - self.site.settings[key] = value - - return "ok" - - def actionUserGetSettings(self, to): - settings = self.user.sites.get(self.site.address, {}).get("settings", {}) - self.response(to, settings) - - def actionUserSetSettings(self, to, settings): - self.user.setSiteSettings(self.site.address, settings) - self.response(to, "ok") - - def actionUserGetGlobalSettings(self, to): - settings = self.user.settings - self.response(to, settings) - - @flag.admin - def actionUserSetGlobalSettings(self, to, settings): - self.user.settings = settings - self.user.save() - self.response(to, "ok") - - @flag.admin - @flag.no_multiuser - def actionServerErrors(self, to): - return config.error_logger.lines - - @flag.admin - @flag.no_multiuser 
def actionServerUpdate(self, to): - def cbServerUpdate(res): - self.response(to, res) - if not res: - return False - for websocket in self.server.websockets: - websocket.cmd( - "notification", - ["info", _["Updating ZeroNet client, will be back in a few minutes..."], 20000] - ) - websocket.cmd("updating") + self.cmd("updating") + sys.modules["main"].update_after_shutdown = True + if sys.modules["main"].file_server.tor_manager.tor_process: + sys.modules["main"].file_server.tor_manager.stopTor() + SiteManager.site_manager.save() + sys.modules["main"].file_server.stop() + sys.modules["main"].ui_server.stop() - import main - main.update_after_shutdown = True - main.restart_after_shutdown = True - SiteManager.site_manager.save() - main.file_server.stop() - main.ui_server.stop() - - self.cmd( - "confirm", - [_["Update ZeroNet client to latest version?"], _["Update"]], - cbServerUpdate - ) - - @flag.admin - @flag.async_run - @flag.no_multiuser def actionServerPortcheck(self, to): - import main - file_server = main.file_server - file_server.portCheck() - self.response(to, file_server.port_opened) + sys.modules["main"].file_server.port_opened = None + res = sys.modules["main"].file_server.openport() + self.response(to, res) - @flag.admin - @flag.no_multiuser - def actionServerShutdown(self, to, restart=False): - import main - def cbServerShutdown(res): - self.response(to, res) - if not res: - return False - if restart: - main.restart_after_shutdown = True - main.file_server.stop() - main.ui_server.stop() + def actionServerShutdown(self, to): + sys.modules["main"].file_server.stop() + sys.modules["main"].ui_server.stop() - if restart: - message = [_["Restart ZeroNet client?"], _["Restart"]] - else: - message = [_["Shut down ZeroNet client?"], _["Shut down"]] - self.cmd("confirm", message, cbServerShutdown) - - @flag.admin - @flag.no_multiuser - def actionServerShowdirectory(self, to, directory="backup", inner_path=""): - if self.request.env["REMOTE_ADDR"] != "127.0.0.1": - return self.response(to, {"error": "Only clients from 127.0.0.1 allowed to run this command"}) - - import webbrowser - if directory == "backup": - path = os.path.abspath(config.data_dir) - elif directory == "log": - path = os.path.abspath(config.log_dir) - elif directory == "site": - path = os.path.abspath(self.site.storage.getPath(helper.getDirname(inner_path))) - - if os.path.isdir(path): - self.log.debug("Opening: %s" % path) - webbrowser.open('file://' + path) - return self.response(to, "ok") - else: - return self.response(to, {"error": "Not a directory"}) - - @flag.admin - @flag.no_multiuser def actionConfigSet(self, to, key, value): - import main - - self.log.debug("Changing config %s value to %r" % (key, value)) - if key not in config.keys_api_change_allowed: - self.response(to, {"error": "Forbidden: You cannot set this config key"}) + if key not in ["tor", "language"]: + self.response(to, {"error": "Forbidden"}) return - if key == "open_browser": - if value not in ["default_browser", "False"]: - self.response(to, {"error": "Forbidden: Invalid value"}) - return - - # Remove empty lines from lists - if type(value) is list: - value = [line for line in value if line] - config.saveValue(key, value) - if key not in config.keys_restart_need: - if value is None: # Default value - setattr(config, key, config.parser.get_default(key)) - setattr(config.arguments, key, config.parser.get_default(key)) - else: - setattr(config, key, value) - setattr(config.arguments, key, value) - else: - config.need_restart = True - 
config.pending_changes[key] = value - if key == "language": import Translate for translate in Translate.translates: translate.setLanguage(value) - message = _["You have successfully changed the web interface's language!"] + "
    " - message += _["Due to the browser's caching, the full transformation could take some minute."] - self.cmd("notification", ["done", message, 10000]) - - if key == "tor_use_bridges": - if value is None: - value = False - else: - value = True - tor_manager = main.file_server.tor_manager - tor_manager.request("SETCONF UseBridges=%i" % value) - - if key == "trackers_file": - config.loadTrackersFile() - - if key == "log_level": - logging.getLogger('').setLevel(logging.getLevelName(config.log_level)) - - if key == "ip_external": - gevent.spawn(main.file_server.portCheck) - - if key == "offline": - if value: - main.file_server.closeConnections() - else: - gevent.spawn(main.file_server.checkSites, check_files=False, force_port_check=True) + self.cmd("notification", ["done", + _["You have successfully changed the web interface's language!"] + "
    " + + _["Due to the browser's caching, the full transformation could take some minute."] + , 10000]) + config.language = value self.response(to, "ok") diff --git a/src/Ui/__init__.py b/src/Ui/__init__.py index dcb8896d..9982dc4f 100644 --- a/src/Ui/__init__.py +++ b/src/Ui/__init__.py @@ -1,3 +1,3 @@ -from .UiServer import UiServer -from .UiRequest import UiRequest -from .UiWebsocket import UiWebsocket \ No newline at end of file +from UiServer import UiServer +from UiRequest import UiRequest +from UiWebsocket import UiWebsocket \ No newline at end of file diff --git a/src/Ui/media/Fixbutton.coffee b/src/Ui/media/Fixbutton.coffee index 954d2b56..9e644a4e 100644 --- a/src/Ui/media/Fixbutton.coffee +++ b/src/Ui/media/Fixbutton.coffee @@ -11,7 +11,7 @@ class Fixbutton return true $(".fixbutton-bg").stop().animate({"scale": 0.6}, 300, "easeOutCubic") $(".fixbutton-burger").stop().animate({"opacity": 0, "left": -20}, 300, "easeOutCubic") - $(".fixbutton-text").stop().animate({"opacity": 0.9, "left": 0}, 300, "easeOutBack") + $(".fixbutton-text").stop().animate({"opacity": 1, "left": 0}, 300, "easeOutBack") ###$(".fixbutton-bg").on "click", -> diff --git a/src/Ui/media/Infopanel.coffee b/src/Ui/media/Infopanel.coffee deleted file mode 100644 index 3a490364..00000000 --- a/src/Ui/media/Infopanel.coffee +++ /dev/null @@ -1,57 +0,0 @@ -class Infopanel - constructor: (@elem) -> - @visible = false - - show: (closed=false) => - @elem.parent().addClass("visible") - if closed - @close() - else - @open() - - unfold: => - @elem.toggleClass("unfolded") - return false - - updateEvents: => - @elem.off("click") - @elem.find(".close").off("click") - @elem.find(".line").off("click") - - @elem.find(".line").on("click", @unfold) - - if @elem.hasClass("closed") - @elem.on "click", => - @onOpened() - @open() - else - @elem.find(".close").on "click", => - @onClosed() - @close() - - hide: => - @elem.parent().removeClass("visible") - - close: => - @elem.addClass("closed") - @updateEvents() - return false - - open: => - @elem.removeClass("closed") - @updateEvents() - return false - - setTitle: (line1, line2) => - @elem.find(".line-1").text(line1) - @elem.find(".line-2").text(line2) - - setClosedNum: (num) => - @elem.find(".closed-num").text(num) - - setAction: (title, func) => - @elem.find(".button").text(title).off("click").on("click", func) - - - -window.Infopanel = Infopanel diff --git a/src/Ui/media/Loading.coffee b/src/Ui/media/Loading.coffee index 8e35ce66..d0604b6f 100644 --- a/src/Ui/media/Loading.coffee +++ b/src/Ui/media/Loading.coffee @@ -1,21 +1,19 @@ class Loading - constructor: (@wrapper) -> + constructor: -> if window.show_loadingscreen then @showScreen() @timer_hide = null - @timer_set = null + setProgress: (percent) -> if @timer_hide clearInterval @timer_hide - @timer_set = RateLimit 500, -> - $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block") + RateLimit 200, -> + $(".progressbar").css("width", percent*100+"%").css("opacity", "1").css("display", "block") hideProgress: -> - @log "hideProgress" - if @timer_set - clearInterval @timer_set + console.log "hideProgress" @timer_hide = setTimeout ( => - $(".progressbar").css("transform": "scaleX(1)").css("opacity", "0").hideLater(1000) + $(".progressbar").css("width", "100%").css("opacity", "0").hideLater(1000) ), 300 @@ -26,37 +24,20 @@ class Loading showTooLarge: (site_info) -> - @log "Displaying large site confirmation" if $(".console .button-setlimit").length == 0 # Not displaying it 
yet line = @printLine("Site size: #{parseInt(site_info.settings.size/1024/1024)}MB is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning") button = $("" + "Open site and set size limit to #{site_info.next_size_limit}MB" + "") - button.on "click", => - button.addClass("loading") - return @wrapper.setSizeLimit(site_info.next_size_limit) + button.on "click", (-> return window.wrapper.setSizeLimit(site_info.next_size_limit) ) line.after(button) setTimeout (=> @printLine('Ready.') ), 100 - showTrackerTorBridge: (server_info) -> - if $(".console .button-settrackerbridge").length == 0 and not server_info.tor_use_meek_bridges - line = @printLine("Tracker connection error detected.", "error") - button = $("" + "Use Tor meek bridges for tracker connections" + "") - button.on "click", => - button.addClass("loading") - @wrapper.ws.cmd "configSet", ["tor_use_bridges", ""] - @wrapper.ws.cmd "configSet", ["trackers_proxy", "tor"] - @wrapper.ws.cmd "siteUpdate", {address: @wrapper.site_info.address, announce: true} - @wrapper.reloadIframe() - return false - line.after(button) - if not server_info.tor_has_meek_bridges - button.addClass("disabled") - @printLine("No meek bridge support in your client, please download the latest bundle.", "warning") + # We dont need loadingscreen anymore hideScreen: -> - @log "hideScreen" + console.log "hideScreen" if not $(".loadingscreen").hasClass("done") # Only if its not animating already if @screen_visible # Hide with animate $(".loadingscreen").addClass("done").removeLater(2000) @@ -84,8 +65,6 @@ class Loading if type == "warning" then line.addClass("console-warning") return line - log: (args...) -> - console.log "[Loading]", args... -window.Loading = Loading +window.Loading = Loading \ No newline at end of file diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee index 35d949f3..00c66761 100644 --- a/src/Ui/media/Notifications.coffee +++ b/src/Ui/media/Notifications.coffee @@ -13,7 +13,7 @@ class Notifications add: (id, type, body, timeout=0) -> - id = id.replace /[^A-Za-z0-9-]/g, "" + id = id.replace /[^A-Za-z0-9]/g, "" # Close notifications with same id for elem in $(".notification-#{id}") @close $(elem) @@ -37,7 +37,7 @@ class Notifications $(".notification-icon", elem).html("i") if typeof(body) == "string" - $(".body", elem).html("
    "+body+"
    ") + $(".body", elem).html(""+body+"") else $(".body", elem).html("").append(body) @@ -51,13 +51,12 @@ class Notifications ), timeout # Animate - width = Math.min(elem.outerWidth() + 70, 580) + width = elem.outerWidth() if not timeout then width += 20 # Add space for close button if elem.outerHeight() > 55 then elem.addClass("long") elem.css({"width": "50px", "transform": "scale(0.01)"}) elem.animate({"scale": 1}, 800, "easeOutElastic") elem.animate({"width": width}, 700, "easeInOutCubic") - $(".body", elem).css("width": (width - 50)) $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000) # Close button or Confirm button @@ -69,11 +68,6 @@ class Notifications $(".select", elem).on "click", => @close elem - # Input enter - $("input", elem).on "keyup", (e) => - if e.keyCode == 13 - @close elem - return elem @@ -86,4 +80,4 @@ class Notifications console.log "[Notifications]", args... -window.Notifications = Notifications +window.Notifications = Notifications \ No newline at end of file diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee index 1b98855e..d755b108 100644 --- a/src/Ui/media/Wrapper.coffee +++ b/src/Ui/media/Wrapper.coffee @@ -2,13 +2,8 @@ class Wrapper constructor: (ws_url) -> @log "Created!" - @loading = new Loading(@) + @loading = new Loading() @notifications = new Notifications($(".notifications")) - @infopanel = new Infopanel($(".infopanel")) - @infopanel.onClosed = => - @ws.cmd("siteSetSettingsValue", ["modified_files_notification", false]) - @infopanel.onOpened = => - @ws.cmd("siteSetSettingsValue", ["modified_files_notification", true]) @fixbutton = new Fixbutton() window.addEventListener("message", @onMessageInner, false) @@ -21,10 +16,7 @@ class Wrapper @ws.connect() @ws_error = null # Ws error message - @next_cmd_message_id = -1 - @site_info = null # Hold latest site info - @server_info = null # Hold latest server info @event_site_info = $.Deferred() # Event when site_info received @inner_loaded = false # If iframe loaded or not @inner_ready = false # Inner frame ready to receive messages @@ -32,13 +24,8 @@ class Wrapper @site_error = null # Latest failed file download @address = null @opener_tested = false - @announcer_line = null - @web_notifications = {} - @is_title_changed = false - @allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors - - window.onload = @onPageLoad # On iframe loaded + window.onload = @onLoad # On iframe loaded window.onhashchange = (e) => # On hash change @log "Hashchange", window.location.hash if window.location.hash @@ -51,22 +38,9 @@ class Wrapper $("#inner-iframe").focus() - verifyEvent: (allowed_target, e) => - if not e.originalEvent.isTrusted - throw "Event not trusted" - - if e.originalEvent.constructor not in @allowed_event_constructors - throw "Invalid event constructor: #{e.constructor} not in #{JSON.stringify(@allowed_event_constructors)}" - - if e.originalEvent.currentTarget != allowed_target[0] - throw "Invalid event target: #{e.originalEvent.currentTarget} != #{allowed_target[0]}" - # Incoming message from UiServer websocket onMessageWebsocket: (e) => message = JSON.parse(e.data) - @handleMessageWebsocket(message) - - handleMessageWebsocket: (message) => cmd = message.cmd if cmd == "response" if @ws.waiting_cb[message.to]? 
# We are waiting for response @@ -75,14 +49,14 @@ class Wrapper @sendInner message # Pass message to inner frame else if cmd == "notification" # Display notification type = message.params[0] - id = "notification-ws-#{message.id}" + id = "notification-#{message.id}" if "-" in message.params[0] # - in first param: message id defined [id, type] = message.params[0].split("-") @notifications.add(id, type, message.params[1], message.params[2]) else if cmd == "progress" # Display notification @actionProgress(message) else if cmd == "prompt" # Prompt input - @displayPrompt message.params[0], message.params[1], message.params[2], message.params[3], (res) => + @displayPrompt message.params[0], message.params[1], message.params[2], (res) => @ws.response message.id, res else if cmd == "confirm" # Confirm action @displayConfirm message.params[0], message.params[1], (res) => @@ -92,26 +66,11 @@ class Wrapper if message.params.address == @address # Current page @setSiteInfo message.params @updateProgress message.params - else if cmd == "setAnnouncerInfo" - @sendInner message # Pass to inner frame - if message.params.address == @address # Current page - @setAnnouncerInfo message.params - @updateProgress message.params else if cmd == "error" @notifications.add("notification-#{message.id}", "error", message.params, 0) else if cmd == "updating" # Close connection - @log "Updating: Closing websocket" @ws.ws.close() @ws.onCloseWebsocket(null, 4000) - else if cmd == "redirect" - window.top.location = message.params - else if cmd == "injectHtml" - $("body").append(message.params) - else if cmd == "injectScript" - script_tag = $(" -

[Wrapper HTML template diff: the markup was stripped during extraction and the hunk is not reconstructable. Recoverable text only: the noscript notice ("ZeroNet requires JavaScript support." with the NoScript/Tor browser hint to trust 127.0.0.1), a sample notification ("! Test notification ×"), and the removed infopanel block ("8", "8 modified files", "content.json, data.json", "Sign & Publish", "Config").]
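Across these hunks every command carries a numeric id and every reply names it in the "to" field: the server answers with self.response(to, ...) and the wrapper resolves @ws.waiting_cb[message.to]. A minimal client-side sketch of that correlation pattern, assuming only a send(text) transport; the class and message shapes follow the JSON visible above:

import json

class WsClient:
    def __init__(self, send):
        self.send = send        # Transport callable, e.g. a websocket's send
        self.next_message_id = 0
        self.waiting_cb = {}    # message id -> callback, like @ws.waiting_cb

    def cmd(self, cmd, params, cb=None):
        self.next_message_id += 1
        if cb:
            self.waiting_cb[self.next_message_id] = cb
        self.send(json.dumps({"cmd": cmd, "params": params, "id": self.next_message_id}))

    def onMessage(self, data):
        message = json.loads(data)
        if message["cmd"] == "response" and message["to"] in self.waiting_cb:
            # "to" echoes the id of the request this message answers.
            self.waiting_cb.pop(message["to"])(message["result"])

# Usage with a fake transport:
sent = []
ws = WsClient(sent.append)
ws.cmd("siteInfo", {}, lambda result: print("got:", result))
request_id = json.loads(sent[0])["id"]
ws.onMessage(json.dumps({"cmd": "response", "to": request_id, "result": "ok"}))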
    @@ -72,32 +55,25 @@ else if (window.opener && window.opener.location.toString()) { - + - - - + + diff --git a/src/User/User.py b/src/User/User.py index dbcfc56f..0bfe082e 100644 --- a/src/User/User.py +++ b/src/User/User.py @@ -1,16 +1,12 @@ import logging import json import time -import binascii - -import gevent import util from Crypt import CryptBitcoin from Plugin import PluginManager from Config import config from util import helper -from Debug import Debug @PluginManager.acceptPlugins @@ -27,8 +23,6 @@ class User(object): self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed) self.sites = data.get("sites", {}) self.certs = data.get("certs", {}) - self.settings = data.get("settings", {}) - self.delayed_save_thread = None self.log = logging.getLogger("User:%s" % self.master_address) @@ -44,30 +38,11 @@ class User(object): user_data["master_seed"] = self.master_seed user_data["sites"] = self.sites user_data["certs"] = self.certs - user_data["settings"] = self.settings - helper.atomicWrite("%s/users.json" % config.data_dir, helper.jsonDumps(users).encode("utf8")) - self.log.debug("Saved in %.3fs" % (time.time() - s)) - self.delayed_save_thread = None - - def saveDelayed(self): - if not self.delayed_save_thread: - self.delayed_save_thread = gevent.spawn_later(5, self.save) + helper.atomicWrite("%s/users.json" % config.data_dir, json.dumps(users, indent=2, sort_keys=True)) + self.log.debug("Saved in %.3fs" % (time.time()-s)) def getAddressAuthIndex(self, address): - return int(binascii.hexlify(address.encode()), 16) - - @util.Noparallel() - def generateAuthAddress(self, address): - s = time.time() - address_id = self.getAddressAuthIndex(address) # Convert site address to int - auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id) - self.sites[address] = { - "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey), - "auth_privatekey": auth_privatekey - } - self.saveDelayed() - self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s)) - return self.sites[address] + return int(address.encode("hex"), 16) # Get user site data # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"} @@ -75,21 +50,23 @@ class User(object): if address not in self.sites: # Generate new BIP32 child key based on site address if not create: return {"auth_address": None, "auth_privatekey": None} # Dont create user yet - self.generateAuthAddress(address) + s = time.time() + address_id = self.getAddressAuthIndex(address) # Convert site address to int + auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id) + self.sites[address] = { + "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey), + "auth_privatekey": auth_privatekey + } + self.save() + self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s)) return self.sites[address] def deleteSiteData(self, address): if address in self.sites: del(self.sites[address]) - self.saveDelayed() + self.save() self.log.debug("Deleted site: %s" % address) - def setSiteSettings(self, address, settings): - site_data = self.getSiteData(address) - site_data["settings"] = settings - self.saveDelayed() - return site_data - # Get data for a new, unique site # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}] def getNewSiteData(self): @@ -123,8 +100,9 @@ class User(object): # Add cert for the user def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign): + domain = domain.lower() # Find privatekey by 
auth address - auth_privatekey = [site["auth_privatekey"] for site in list(self.sites.values()) if site["auth_address"] == auth_address][0] + auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0] cert_node = { "auth_address": auth_address, "auth_privatekey": auth_privatekey, @@ -154,7 +132,7 @@ class User(object): else: if "cert" in site_data: del site_data["cert"] - self.saveDelayed() + self.save() return site_data # Get cert for the site address diff --git a/src/User/UserManager.py b/src/User/UserManager.py index 067734a6..dff7ece1 100644 --- a/src/User/UserManager.py +++ b/src/User/UserManager.py @@ -1,10 +1,9 @@ # Included modules import json import logging -import time # ZeroNet Modules -from .User import User +from User import User from Plugin import PluginManager from Config import config @@ -13,7 +12,6 @@ from Config import config class UserManager(object): def __init__(self): self.users = {} - self.log = logging.getLogger("UserManager") # Load all user from data/users.json def load(self): @@ -22,15 +20,8 @@ class UserManager(object): user_found = [] added = 0 - s = time.time() # Load new users - try: - json_path = "%s/users.json" % config.data_dir - data = json.load(open(json_path)) - except Exception as err: - raise Exception("Unable to load %s: %s" % (json_path, err)) - - for master_address, data in list(data.items()): + for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items(): if master_address not in self.users: user = User(master_address, data=data) self.users[master_address] = user @@ -38,23 +29,22 @@ class UserManager(object): user_found.append(master_address) # Remove deleted adresses - for master_address in list(self.users.keys()): + for master_address in self.users.keys(): if master_address not in user_found: del(self.users[master_address]) - self.log.debug("Removed user: %s" % master_address) + logging.debug("Removed user: %s" % master_address) if added: - self.log.debug("Added %s users in %.3fs" % (added, time.time() - s)) + logging.debug("UserManager added %s users" % added) # Create new user # Return: User def create(self, master_address=None, master_seed=None): - self.list() # Load the users if it's not loaded yet user = User(master_address, master_seed) - self.log.debug("Created user: %s" % user.master_address) + logging.debug("Created user: %s" % user.master_address) if user.master_address: # If successfully created self.users[user.master_address] = user - user.saveDelayed() + user.save() return user # List all users from data/users.json @@ -69,9 +59,24 @@ class UserManager(object): def get(self, master_address=None): users = self.list() if users: - return list(users.values())[0] # Single user mode, always return the first + return users.values()[0] # Single user mode, always return the first else: return None user_manager = UserManager() # Singleton + + +# Debug: Reload User.py +def reloadModule(): + return "Not used" + + import imp + global User, UserManager, user_manager + User = imp.load_source("User", "src/User/User.py").User # Reload source + # module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module + # UserManager = module.UserManager + # user_manager = module.user_manager + # Reload users + user_manager = UserManager() + user_manager.load() diff --git a/src/User/__init__.py b/src/User/__init__.py index 4db9149e..8d569979 100644 --- a/src/User/__init__.py +++ b/src/User/__init__.py @@ -1 +1 @@ -from .User import User +from User import 
User diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index b7111ba1..bdeb2431 100644 --- a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -1,23 +1,9 @@ import time import gevent -import gevent.lock from Debug import Debug from Config import config -from Content.ContentManager import VerifyError - - -class WorkerDownloadError(Exception): - pass - - -class WorkerIOError(Exception): - pass - - -class WorkerStop(Exception): - pass class Worker(object): @@ -29,8 +15,6 @@ class Worker(object): self.key = None self.running = False self.thread = None - self.num_downloaded = 0 - self.num_failed = 0 def __str__(self): return "Worker %s %s" % (self.manager.site.address_short, self.key) @@ -38,181 +22,65 @@ class Worker(object): def __repr__(self): return "<%s>" % self.__str__() - def waitForTask(self, task, timeout): # Wait for other workers to finish the task - for sleep_i in range(1, timeout * 10): - time.sleep(0.1) - if task["done"] or task["workers_num"] == 0: - if config.verbose: - self.manager.log.debug("%s: %s, picked task free after %ss sleep. (done: %s)" % ( - self.key, task["inner_path"], 0.1 * sleep_i, task["done"] - )) - break - - if sleep_i % 10 == 0: - workers = self.manager.findWorkers(task) - if not workers or not workers[0].peer.connection: - break - worker_idle = time.time() - workers[0].peer.connection.last_recv_time - if worker_idle > 1: - if config.verbose: - self.manager.log.debug("%s: %s, worker %s seems idle, picked up task after %ss sleep. (done: %s)" % ( - self.key, task["inner_path"], workers[0].key, 0.1 * sleep_i, task["done"] - )) - break - return True - - def pickTask(self): # Find and select a new task for the worker - task = self.manager.getTask(self.peer) - if not task: # No more task - time.sleep(0.1) # Wait a bit for new tasks - task = self.manager.getTask(self.peer) - if not task: # Still no task, stop it - stats = "downloaded files: %s, failed: %s" % (self.num_downloaded, self.num_failed) - self.manager.log.debug("%s: No task found, stopping (%s)" % (self.key, stats)) - return False - - if not task["time_started"]: - task["time_started"] = time.time() # Task started now - - if task["workers_num"] > 0: # Wait a bit if someone already working on it - if task["peers"]: # It's an update - timeout = 3 - else: - timeout = 1 - - if task["size"] > 100 * 1024 * 1024: - timeout = timeout * 2 - - if config.verbose: - self.manager.log.debug("%s: Someone already working on %s (pri: %s), sleeping %s sec..." 
% ( - self.key, task["inner_path"], task["priority"], timeout - )) - - self.waitForTask(task, timeout) - return task - - def downloadTask(self, task): - try: - buff = self.peer.getFile(task["site"].address, task["inner_path"], task["size"]) - except Exception as err: - self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) - raise WorkerDownloadError(str(err)) - - if not buff: - raise WorkerDownloadError("No response") - - return buff - - def getTaskLock(self, task): - if task["lock"] is None: - task["lock"] = gevent.lock.Semaphore() - return task["lock"] - - def writeTask(self, task, buff): - buff.seek(0) - try: - task["site"].storage.write(task["inner_path"], buff) - except Exception as err: - if type(err) == Debug.Notify: - self.manager.log.debug("%s: Write aborted: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err)) - else: - self.manager.log.error("%s: Error writing: %s (%s: %s)" % (self.key, task["inner_path"], type(err), err)) - raise WorkerIOError(str(err)) - - def onTaskVerifyFail(self, task, error_message): - self.num_failed += 1 - if self.manager.started_task_num < 50 or config.verbose: - self.manager.log.debug( - "%s: Verify failed: %s, error: %s, failed peers: %s, workers: %s" % - (self.key, task["inner_path"], error_message, len(task["failed"]), task["workers_num"]) - ) - task["failed"].append(self.peer) - self.peer.hash_failed += 1 - if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10: - # Broken peer: More fails than tasks number but atleast 3 - raise WorkerStop( - "Too many errors (hash failed: %s, connection error: %s)" % - (self.peer.hash_failed, self.peer.connection_error) - ) - - def handleTask(self, task): - download_err = write_err = False - - write_lock = None - try: - buff = self.downloadTask(task) - - if task["done"] is True: # Task done, try to find new one - return None - - if self.running is False: # Worker no longer needed or got killed - self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) - raise WorkerStop("Running got disabled") - - write_lock = self.getTaskLock(task) - write_lock.acquire() - if task["site"].content_manager.verifyFile(task["inner_path"], buff) is None: - is_same = True - else: - is_same = False - is_valid = True - except (WorkerDownloadError, VerifyError) as err: - download_err = err - is_valid = False - is_same = False - - if is_valid and not is_same: - if self.manager.started_task_num < 50 or task["priority"] > 10 or config.verbose: - self.manager.log.debug("%s: Verify correct: %s" % (self.key, task["inner_path"])) - try: - self.writeTask(task, buff) - except WorkerIOError as err: - write_err = err - - if not task["done"]: - if write_err: - self.manager.failTask(task, reason="Write error") - self.num_failed += 1 - self.manager.log.error("%s: Error writing %s: %s" % (self.key, task["inner_path"], write_err)) - elif is_valid: - self.manager.doneTask(task) - self.num_downloaded += 1 - - if write_lock is not None and write_lock.locked(): - write_lock.release() - - if not is_valid: - self.onTaskVerifyFail(task, download_err) - time.sleep(1) - return False - - return True - + # Downloader thread def downloader(self): self.peer.hash_failed = 0 # Reset hash error counter while self.running: # Try to pickup free file download task - task = self.pickTask() - - if not task: + task = self.manager.getTask(self.peer) + if not task: # Die, no more task + self.manager.log.debug("%s: No task found, stopping" % self.key) break + if not 
task["time_started"]: + task["time_started"] = time.time() # Task started now - if task["done"]: - continue - - self.task = task - - self.manager.addTaskWorker(task, self) - - try: - success = self.handleTask(task) - except WorkerStop as err: - self.manager.log.debug("%s: Worker stopped: %s" % (self.key, err)) - self.manager.removeTaskWorker(task, self) - break - - self.manager.removeTaskWorker(task, self) + if task["workers_num"] > 0: # Wait a bit if someone already working on it + if config.verbose: + self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"])) + time.sleep(1) + if config.verbose: + self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"])) + if task["done"] is False: + self.task = task + site = task["site"] + task["workers_num"] += 1 + try: + buff = self.peer.getFile(site.address, task["inner_path"]) + except Exception, err: + self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) + buff = None + if self.running is False: # Worker no longer needed or got killed + self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) + break + if task["done"] is True: # Task done, try to find new one + continue + if buff: # Download ok + correct = site.content_manager.verifyFile(task["inner_path"], buff) + else: # Download error + correct = False + if correct is True or correct is None: # Hash ok or same file + self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"])) + if correct is True and task["done"] is False: # Save if changed and task not done yet + buff.seek(0) + site.storage.write(task["inner_path"], buff) + if task["done"] is False: + self.manager.doneTask(task) + task["workers_num"] -= 1 + self.task = None + else: # Hash failed + self.manager.log.debug( + "%s: Hash failed: %s, failed peers: %s" % + (self.key, task["inner_path"], len(task["failed"])) + ) + task["failed"].append(self.peer) + self.task = None + self.peer.hash_failed += 1 + if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10: + # Broken peer: More fails than tasks number but atleast 3 + break + task["workers_num"] -= 1 + time.sleep(1) self.peer.onWorkerDone() self.running = False self.manager.removeWorker(self) @@ -223,17 +91,17 @@ class Worker(object): self.thread = gevent.spawn(self.downloader) # Skip current task - def skip(self, reason="Unknown"): - self.manager.log.debug("%s: Force skipping (reason: %s)" % (self.key, reason)) + def skip(self): + self.manager.log.debug("%s: Force skipping" % self.key) if self.thread: - self.thread.kill(exception=Debug.createNotifyType("Worker skipping (reason: %s)" % reason)) + self.thread.kill(exception=Debug.Notify("Worker stopped")) self.start() # Force stop the worker - def stop(self, reason="Unknown"): - self.manager.log.debug("%s: Force stopping (reason: %s)" % (self.key, reason)) + def stop(self): + self.manager.log.debug("%s: Force stopping" % self.key) self.running = False if self.thread: - self.thread.kill(exception=Debug.createNotifyType("Worker stopped (reason: %s)" % reason)) + self.thread.kill(exception=Debug.Notify("Worker stopped")) del self.thread self.manager.removeWorker(self) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index f68e8410..ae12e976 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -1,15 +1,14 @@ import time import logging +import random import collections import gevent -from 
.Worker import Worker -from .WorkerTaskManager import WorkerTaskManager +from Worker import Worker from Config import config from util import helper from Plugin import PluginManager -from Debug.DebugLock import DebugLock import util @@ -19,17 +18,15 @@ class WorkerManager(object): def __init__(self, site): self.site = site self.workers = {} # Key: ip:port, Value: Worker.Worker - self.tasks = WorkerTaskManager() - self.next_task_id = 1 - self.lock_add_task = DebugLock(name="Lock AddTask:%s" % self.site.address_short) - # {"id": 1, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": None, - # "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids, "lock": None or gevent.lock.RLock} + self.tasks = [] + # {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": None, + # "time_started": None, "time_added": time.time(), "peers": peers, "priority": 0, "failed": peer_ids} self.started_task_num = 0 # Last added task num self.asked_peers = [] self.running = True self.time_task_added = 0 self.log = logging.getLogger("WorkerManager:%s" % self.site.address_short) - self.site.greenlet_manager.spawn(self.checkTasks) + self.process_taskchecker = gevent.spawn(self.checkTasks) def __str__(self): return "WorkerManager %s" % self.site.address_short @@ -41,82 +38,75 @@ class WorkerManager(object): def checkTasks(self): while self.running: tasks = task = worker = workers = None # Cleanup local variables - announced = False time.sleep(15) # Check every 15 sec # Clean up workers - for worker in list(self.workers.values()): + for worker in self.workers.values(): if worker.task and worker.task["done"]: - worker.skip(reason="Task done") # Stop workers with task done + worker.skip() # Stop workers with task done if not self.tasks: continue tasks = self.tasks[:] # Copy it so removing elements wont cause any problem - num_tasks_started = len([task for task in tasks if task["time_started"]]) - - self.log.debug( - "Tasks: %s, started: %s, bad files: %s, total started: %s" % - (len(tasks), num_tasks_started, len(self.site.bad_files), self.started_task_num) - ) - for task in tasks: - if task["time_started"] and time.time() >= task["time_started"] + 60: + size_extra_time = task["size"] / (1024 * 100) # 1 second for every 100k + if task["time_started"] and time.time() >= task["time_started"] + 60 + size_extra_time: self.log.debug("Timeout, Skipping: %s" % task) # Task taking too long time, skip it # Skip to next file workers workers = self.findWorkers(task) if workers: for worker in workers: - worker.skip(reason="Task timeout") + worker.skip() else: - self.failTask(task, reason="No workers") - - elif time.time() >= task["time_added"] + 60 and not self.workers: # No workers left - self.failTask(task, reason="Timeout") + self.failTask(task) + elif time.time() >= task["time_added"] + 60 + size_extra_time and not self.workers: # No workers left + self.log.debug("Timeout, Cleanup task: %s" % task) + # Remove task + self.failTask(task) elif (task["time_started"] and time.time() >= task["time_started"] + 15) or not self.workers: # Find more workers: Task started more than 15 sec ago or no workers workers = self.findWorkers(task) self.log.debug( - "Slow task: %s, (workers: %s, optional_hash_id: %s, peers: %s, failed: %s, asked: %s)" % + "Slow task: %s 15+%ss, (workers: %s, optional_hash_id: %s, peers: %s, failed: %s, asked: %s)" % ( - task["inner_path"], len(workers), 
task["optional_hash_id"], + task["inner_path"], size_extra_time, len(workers), task["optional_hash_id"], len(task["peers"] or []), len(task["failed"]), len(self.asked_peers) ) ) - if not announced and task["site"].isAddedRecently(): - task["site"].announce(mode="more") # Find more peers - announced = True + task["site"].announce(mode="more") # Find more peers if task["optional_hash_id"]: if self.workers: if not task["time_started"]: ask_limit = 20 - else: + elif task["priority"] > 0: ask_limit = max(10, time.time() - task["time_started"]) + else: + ask_limit = max(10, (time.time() - task["time_started"]) / 2) if len(self.asked_peers) < ask_limit and len(task["peers"] or []) <= len(task["failed"]) * 2: # Re-search for high priority self.startFindOptional(find_more=True) - if task["peers"]: - peers_try = [peer for peer in task["peers"] if peer not in task["failed"] and peer not in workers] + elif task["peers"]: + peers_try = [peer for peer in task["peers"] if peer not in task["failed"]] if peers_try: - self.startWorkers(peers_try, force_num=5, reason="Task checker (optional, has peers)") + self.startWorkers(peers_try) else: self.startFindOptional(find_more=True) - else: - self.startFindOptional(find_more=True) else: if task["peers"]: # Release the peer lock self.log.debug("Task peer lock release: %s" % task["inner_path"]) task["peers"] = [] - self.startWorkers(reason="Task checker") - - if len(self.tasks) > len(self.workers) * 2 and len(self.workers) < self.getMaxWorkers(): - self.startWorkers(reason="Task checker (need more workers)") + self.startWorkers() + break # One reannounce per loop self.log.debug("checkTasks stopped running") # Returns the next free or less worked task def getTask(self, peer): + # Sort tasks by priority and worker numbers + self.tasks.sort(key=lambda task: task["priority"] - task["workers_num"] * 5, reverse=True) + for task in self.tasks: # Find a task if task["peers"] and peer not in task["peers"]: continue # This peer not allowed to pick this task @@ -124,16 +114,14 @@ class WorkerManager(object): continue # Peer already tried to solve this, but failed if task["optional_hash_id"] and task["peers"] is None: continue # No peers found yet for the optional task - if task["done"]: - continue return task - def removeSolvedFileTasks(self, mark_as_good=True): + def removeGoodFileTasks(self): for task in self.tasks[:]: if task["inner_path"] not in self.site.bad_files: - self.log.debug("No longer in bad_files, marking as %s: %s" % (mark_as_good, task["inner_path"])) + self.log.debug("No longer in bad_files, marking as good: %s" % task["inner_path"]) task["done"] = True - task["evt"].set(mark_as_good) + task["evt"].set(True) self.tasks.remove(task) if not self.tasks: self.started_task_num = 0 @@ -141,7 +129,7 @@ class WorkerManager(object): # New peers added to site def onPeers(self): - self.startWorkers(reason="More peers found") + self.startWorkers() def getMaxWorkers(self): if len(self.tasks) > 50: @@ -150,75 +138,47 @@ class WorkerManager(object): return config.workers # Add new worker - def addWorker(self, peer, multiplexing=False, force=False): + def addWorker(self, peer): key = peer.key - if len(self.workers) > self.getMaxWorkers() and not force: - return False - if multiplexing: # Add even if we already have worker for this peer - key = "%s/%s" % (key, len(self.workers)) - if key not in self.workers: + if key not in self.workers and len(self.workers) < self.getMaxWorkers(): # We dont have worker for that peer and workers num less than max - task = self.getTask(peer) 
- if task: - worker = Worker(self, peer) - self.workers[key] = worker - worker.key = key - worker.start() - return worker - else: - return False - else: # We have worker for this peer or its over the limit + worker = Worker(self, peer) + self.workers[key] = worker + worker.key = key + worker.start() + return worker + else: # We have woker for this peer or its over the limit return False - def taskAddPeer(self, task, peer): - if task["peers"] is None: - task["peers"] = [] - if peer in task["failed"]: - return False - - if peer not in task["peers"]: - task["peers"].append(peer) - return True - # Start workers to process tasks - def startWorkers(self, peers=None, force_num=0, reason="Unknown"): + def startWorkers(self, peers=None): if not self.tasks: return False # No task for workers - max_workers = min(self.getMaxWorkers(), len(self.site.peers)) - if len(self.workers) >= max_workers and not peers: + if len(self.workers) >= self.getMaxWorkers() and not peers: return False # Workers number already maxed and no starting peers defined - self.log.debug( - "Starting workers (%s), tasks: %s, peers: %s, workers: %s" % - (reason, len(self.tasks), len(peers or []), len(self.workers)) - ) + self.log.debug("Starting workers, tasks: %s, peers: %s, workers: %s" % (len(self.tasks), len(peers or []), len(self.workers))) if not peers: peers = self.site.getConnectedPeers() - if len(peers) < max_workers: - peers += self.site.getRecentPeers(max_workers * 2) + if len(peers) < self.getMaxWorkers(): + peers += self.site.peers.values()[0:self.getMaxWorkers()] if type(peers) is set: peers = list(peers) # Sort by ping - peers.sort(key=lambda peer: peer.connection.last_ping_delay if peer.connection and peer.connection.last_ping_delay and len(peer.connection.waiting_requests) == 0 and peer.connection.connected else 9999) + peers.sort(key = lambda peer: peer.connection.last_ping_delay if peer.connection and len(peer.connection.waiting_requests) == 0 else 9999) for peer in peers: # One worker for every peer if peers and peer not in peers: continue # If peers defined and peer not valid - - if force_num: - worker = self.addWorker(peer, force=True) - force_num -= 1 - else: - worker = self.addWorker(peer) - + worker = self.addWorker(peer) if worker: - self.log.debug("Added worker: %s (rep: %s), workers: %s/%s" % (peer.key, peer.reputation, len(self.workers), max_workers)) + self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), self.getMaxWorkers())) # Find peers for optional hash in local hash tables and add to task peers def findOptionalTasks(self, optional_tasks, reset_task=False): found = collections.defaultdict(list) # { found_hash: [peer1, peer2...], ...} - for peer in list(self.site.peers.values()): + for peer in self.site.peers.values(): if not peer.has_hashfield: continue @@ -230,8 +190,11 @@ class WorkerManager(object): task["failed"] = [] if peer in task["failed"]: continue - if self.taskAddPeer(task, peer): - found[optional_hash_id].append(peer) + found[optional_hash_id].append(peer) + if task["peers"] and peer not in task["peers"]: + task["peers"].append(peer) + else: + task["peers"] = [peer] return found @@ -239,7 +202,7 @@ class WorkerManager(object): def findOptionalHashIds(self, optional_hash_ids, limit=0): found = collections.defaultdict(list) # { found_hash_id: [peer1, peer2...], ...} - for peer in list(self.site.peers.values()): + for peer in self.site.peers.values(): if not peer.has_hashfield: continue @@ -255,17 +218,20 @@ class WorkerManager(object): # Add peers to tasks 
from found result def addOptionalPeers(self, found_ips): found = collections.defaultdict(list) - for hash_id, peer_ips in found_ips.items(): + for hash_id, peer_ips in found_ips.iteritems(): task = [task for task in self.tasks if task["optional_hash_id"] == hash_id] if task: # Found task, lets take the first task = task[0] else: continue for peer_ip in peer_ips: - peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True, source="optional") + peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True) if not peer: continue - if self.taskAddPeer(task, peer): + if task["peers"] is None: + task["peers"] = [] + if peer not in task["peers"]: + task["peers"].append(peer) found[hash_id].append(peer) if peer.hashfield.appendHashId(hash_id): # Peer has this file peer.time_hashfield = None # Peer hashfield probably outdated @@ -278,7 +244,7 @@ class WorkerManager(object): # Wait for more file requests if len(self.tasks) < 20 or high_priority: time.sleep(0.01) - elif len(self.tasks) > 90: + if len(self.tasks) > 90: time.sleep(5) else: time.sleep(0.5) @@ -296,10 +262,10 @@ class WorkerManager(object): found = self.findOptionalTasks(optional_tasks, reset_task=reset_task) if found: - found_peers = set([peer for peers in list(found.values()) for peer in peers]) - self.startWorkers(found_peers, force_num=3, reason="Optional found in local peers") + found_peers = set([peer for peers in found.values() for peer in peers]) + self.startWorkers(found_peers) - if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.values())): + if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.itervalues())): self.log.debug("No local result for optional files: %s" % (optional_hash_ids - set(found))) # Query hashfield from connected peers @@ -308,7 +274,8 @@ class WorkerManager(object): if not peers: peers = self.site.getConnectablePeers() for peer in peers: - threads.append(self.site.greenlet_manager.spawn(peer.updateHashfield, force=find_more)) + if not peer.time_hashfield: + threads.append(gevent.spawn(peer.updateHashfield)) gevent.joinall(threads, timeout=5) if time_tasks != self.time_task_added: # New task added since start @@ -321,31 +288,24 @@ class WorkerManager(object): )) if found: - found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) - self.startWorkers(found_peers, force_num=3, reason="Optional found in connected peers") + found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + self.startWorkers(found_peers) if len(found) < len(optional_hash_ids) or find_more: - self.log.debug( - "No connected hashtable result for optional files: %s (asked: %s)" % - (optional_hash_ids - set(found), len(self.asked_peers)) - ) - if not self.tasks: - self.log.debug("No tasks, stopping finding optional peers") - return + self.log.debug("No connected hashtable result for optional files: %s" % (optional_hash_ids - set(found))) # Try to query connected peers threads = [] - peers = [peer for peer in self.site.getConnectedPeers() if peer.key not in self.asked_peers][0:10] + peers = [peer for peer in self.site.getConnectedPeers() if peer not in self.asked_peers] if not peers: - peers = self.site.getConnectablePeers(ignore=self.asked_peers) + peers = self.site.getConnectablePeers() for peer in peers: - threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids))) - self.asked_peers.append(peer.key) + 
threads.append(gevent.spawn(peer.findHashIds, list(optional_hash_ids))) + self.asked_peers.append(peer) for i in range(5): time.sleep(1) - thread_values = [thread.value for thread in threads if thread.value] if not thread_values: continue @@ -357,18 +317,15 @@ class WorkerManager(object): )) if found: - found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) - self.startWorkers(found_peers, force_num=3, reason="Optional found by findhash connected peers") + found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + self.startWorkers(found_peers) if len(thread_values) == len(threads): # Got result from all started thread break if len(found) < len(optional_hash_ids): - self.log.debug( - "No findHash result, try random peers: %s (asked: %s)" % - (optional_hash_ids - set(found), len(self.asked_peers)) - ) + self.log.debug("No findHash result, try random peers: %s" % (optional_hash_ids - set(found))) # Try to query random peers if time_tasks != self.time_task_added: # New task added since start @@ -379,8 +336,8 @@ class WorkerManager(object): peers = self.site.getConnectablePeers(ignore=self.asked_peers) for peer in peers: - threads.append(self.site.greenlet_manager.spawn(peer.findHashIds, list(optional_hash_ids))) - self.asked_peers.append(peer.key) + threads.append(gevent.spawn(peer.findHashIds, list(optional_hash_ids))) + self.asked_peers.append(peer) gevent.joinall(threads, timeout=15) @@ -389,33 +346,24 @@ class WorkerManager(object): self.log.debug("Found optional files after findhash random peers: %s/%s" % (len(found), len(optional_hash_ids))) if found: - found_peers = set([peer for hash_id_peers in list(found.values()) for peer in hash_id_peers]) - self.startWorkers(found_peers, force_num=3, reason="Option found using findhash random peers") + found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) + self.startWorkers(found_peers) if len(found) < len(optional_hash_ids): self.log.debug("No findhash result for optional files: %s" % (optional_hash_ids - set(found))) - if time_tasks != self.time_task_added: # New task added since start - self.log.debug("New task since start, restarting...") - self.site.greenlet_manager.spawnLater(0.1, self.startFindOptional) - else: - self.log.debug("startFindOptional ended") - # Stop all worker def stopWorkers(self): - num = 0 - for worker in list(self.workers.values()): - worker.stop(reason="Stopping all workers") - num += 1 + for worker in self.workers.values(): + worker.stop() tasks = self.tasks[:] # Copy for task in tasks: # Mark all current task as failed - self.failTask(task, reason="Stopping all workers") - return num + self.failTask(task) # Find workers by task def findWorkers(self, task): workers = [] - for worker in list(self.workers.values()): + for worker in self.workers.values(): if worker.task == task: workers.append(worker) return workers @@ -427,15 +375,12 @@ class WorkerManager(object): del(self.workers[worker.key]) self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), self.getMaxWorkers())) if len(self.workers) <= self.getMaxWorkers() / 3 and len(self.asked_peers) < 10: - optional_task = next((task for task in self.tasks if task["optional_hash_id"]), None) - if optional_task: - if len(self.workers) == 0: - self.startFindOptional(find_more=True) - else: - self.startFindOptional() - elif self.tasks and not self.workers and worker.task and len(worker.task["failed"]) < 20: - self.log.debug("Starting new workers... 
(tasks: %s)" % len(self.tasks)) - self.startWorkers(reason="Removed worker") + important_task = (task for task in self.tasks if task["priority"] > 0) + if next(important_task, None) or len(self.asked_peers) == 0: + self.startFindOptional(find_more=True) + else: + self.startFindOptional() + # Tasks sorted by this def getPriorityBoost(self, inner_path): @@ -445,124 +390,97 @@ class WorkerManager(object): return 9998 # index.html also important if "-default" in inner_path: return -4 # Default files are cloning not important - elif inner_path.endswith("all.css"): - return 14 # boost css files priority - elif inner_path.endswith("all.js"): - return 13 # boost js files priority + elif inner_path.endswith(".css"): + return 7 # boost css files priority + elif inner_path.endswith(".js"): + return 6 # boost js files priority elif inner_path.endswith("dbschema.json"): - return 12 # boost database specification + return 5 # boost database specification elif inner_path.endswith("content.json"): return 1 # boost included content.json files priority a bit elif inner_path.endswith(".json"): - if len(inner_path) < 50: # Boost non-user json files - return 11 + if len(inner_path) < 50: # Boost non-user json files more + return 4 else: return 2 return 0 - def addTaskUpdate(self, task, peer, priority=0): - if priority > task["priority"]: - self.tasks.updateItem(task, "priority", priority) - if peer and task["peers"]: # This peer also has new version, add it to task possible peers - task["peers"].append(peer) - self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"])) - self.startWorkers([peer], reason="Added new task (update received by peer)") - elif peer and peer in task["failed"]: - task["failed"].remove(peer) # New update arrived, remove the peer from failed peers - self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"])) - self.startWorkers([peer], reason="Added new task (peer failed before)") + # Create new task and return asyncresult + def addTask(self, inner_path, peer=None, priority=0): + self.site.onFileStart(inner_path) # First task, trigger site download started + task = self.findTask(inner_path) + if task: # Already has task for that file + if peer and task["peers"]: # This peer also has new version, add it to task possible peers + task["peers"].append(peer) + self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"])) + self.startWorkers([peer]) + elif peer and peer in task["failed"]: + task["failed"].remove(peer) # New update arrived, remove the peer from failed peers + self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"])) + self.startWorkers([peer]) - def addTaskCreate(self, inner_path, peer, priority=0, file_info=None): - evt = gevent.event.AsyncResult() - if peer: - peers = [peer] # Only download from this peer - else: - peers = None - if not file_info: + if priority: + task["priority"] += priority # Boost on priority + return task["evt"] + else: # No task for that file yet + evt = gevent.event.AsyncResult() + if peer: + peers = [peer] # Only download from this peer + else: + peers = None file_info = self.site.content_manager.getFileInfo(inner_path) - if file_info and file_info["optional"]: - optional_hash_id = helper.toHashId(file_info["sha512"]) - else: - optional_hash_id = None - if file_info: - size = file_info.get("size", 0) - else: - size = 0 + if file_info and file_info["optional"]: + optional_hash_id = helper.toHashId(file_info["sha512"]) + else: + optional_hash_id = None + if file_info: + size = 
file_info.get("size", 0) + else: + size = 0 + priority += self.getPriorityBoost(inner_path) - self.lock_add_task.acquire() + if self.started_task_num == 0: # Boost priority for first requested file + priority += 1 - # Check again if we have task for this file - task = self.tasks.findTask(inner_path) - if task: - self.addTaskUpdate(task, peer, priority) - return task + task = { + "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, + "optional_hash_id": optional_hash_id, "time_added": time.time(), "time_started": None, + "time_action": None, "peers": peers, "priority": priority, "failed": [], "size": size + } - priority += self.getPriorityBoost(inner_path) + self.tasks.append(task) - if self.started_task_num == 0: # Boost priority for first requested file - priority += 1 - - task = { - "id": self.next_task_id, "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, - "optional_hash_id": optional_hash_id, "time_added": time.time(), "time_started": None, "lock": None, - "time_action": None, "peers": peers, "priority": priority, "failed": [], "size": size - } - - self.tasks.append(task) - self.lock_add_task.release() - - self.next_task_id += 1 - self.started_task_num += 1 - if config.verbose: + self.started_task_num += 1 self.log.debug( "New task: %s, peer lock: %s, priority: %s, optional_hash_id: %s, tasks started: %s" % (task["inner_path"], peers, priority, optional_hash_id, self.started_task_num) ) + self.time_task_added = time.time() - self.time_task_added = time.time() + if optional_hash_id: + if self.asked_peers: + del self.asked_peers[:] # Reset asked peers + self.startFindOptional(high_priority=priority > 0) - if optional_hash_id: - if self.asked_peers: - del self.asked_peers[:] # Reset asked peers - self.startFindOptional(high_priority=priority > 0) + if peers: + self.startWorkers(peers) - if peers: - self.startWorkers(peers, reason="Added new optional task") + else: + self.startWorkers(peers) + return evt - else: - self.startWorkers(peers, reason="Added new task") - return task - - # Create new task and return asyncresult - def addTask(self, inner_path, peer=None, priority=0, file_info=None): - self.site.onFileStart(inner_path) # First task, trigger site download started - task = self.tasks.findTask(inner_path) - if task: # Already has task for that file - self.addTaskUpdate(task, peer, priority) - else: # No task for that file yet - task = self.addTaskCreate(inner_path, peer, priority, file_info) - return task - - def addTaskWorker(self, task, worker): - try: - self.tasks.updateItem(task, "workers_num", task["workers_num"] + 1) - except ValueError: - task["workers_num"] += 1 - - def removeTaskWorker(self, task, worker): - try: - self.tasks.updateItem(task, "workers_num", task["workers_num"] - 1) - except ValueError: - task["workers_num"] -= 1 - if len(task["failed"]) >= len(self.workers): - fail_reason = "Too many fails: %s (workers: %s)" % (len(task["failed"]), len(self.workers)) - self.failTask(task, reason=fail_reason) + # Find a task using inner_path + def findTask(self, inner_path): + for task in self.tasks: + if task["inner_path"] == inner_path: + return task + return None # Not found # Wait for other tasks def checkComplete(self): time.sleep(0.1) if not self.tasks: - self.log.debug("Check complete: No tasks") + self.log.debug("Check compelte: No tasks") self.onComplete() def onComplete(self): @@ -575,26 +493,19 @@ class WorkerManager(object): task["done"] = True self.tasks.remove(task) # Remove from queue 
if task["optional_hash_id"]: - self.log.debug( - "Downloaded optional file in %.3fs, adding to hashfield: %s" % - (time.time() - task["time_started"], task["inner_path"]) - ) + self.log.debug("Downloaded optional file, adding to hashfield: %s" % task["inner_path"]) self.site.content_manager.optionalDownloaded(task["inner_path"], task["optional_hash_id"], task["size"]) self.site.onFileDone(task["inner_path"]) task["evt"].set(True) if not self.tasks: - self.site.greenlet_manager.spawn(self.checkComplete) + gevent.spawn(self.checkComplete) # Mark a task failed - def failTask(self, task, reason="Unknown"): - try: + def failTask(self, task): + if task in self.tasks: + task["done"] = True self.tasks.remove(task) # Remove from queue - except ValueError as err: - return False - - self.log.debug("Task %s failed (Reason: %s)" % (task["inner_path"], reason)) - task["done"] = True - self.site.onFileFail(task["inner_path"]) - task["evt"].set(False) - if not self.tasks: - self.site.greenlet_manager.spawn(self.checkComplete) + self.site.onFileFail(task["inner_path"]) + task["evt"].set(False) + if not self.tasks: + self.started_task_num = 0 diff --git a/src/Worker/WorkerTaskManager.py b/src/Worker/WorkerTaskManager.py deleted file mode 100644 index 9359701d..00000000 --- a/src/Worker/WorkerTaskManager.py +++ /dev/null @@ -1,122 +0,0 @@ -import bisect -from collections.abc import MutableSequence - - -class CustomSortedList(MutableSequence): - def __init__(self): - super().__init__() - self.items = [] # (priority, added index, actual value) - self.logging = False - - def __repr__(self): - return "<{0} {1}>".format(self.__class__.__name__, self.items) - - def __len__(self): - return len(self.items) - - def __getitem__(self, index): - if type(index) is int: - return self.items[index][2] - else: - return [item[2] for item in self.items[index]] - - def __delitem__(self, index): - del self.items[index] - - def __setitem__(self, index, value): - self.items[index] = self.valueToItem(value) - - def __str__(self): - return str(self[:]) - - def insert(self, index, value): - self.append(value) - - def append(self, value): - bisect.insort(self.items, self.valueToItem(value)) - - def updateItem(self, value, update_key=None, update_value=None): - self.remove(value) - if update_key is not None: - value[update_key] = update_value - self.append(value) - - def sort(self, *args, **kwargs): - raise Exception("Sorted list can't be sorted") - - def valueToItem(self, value): - return (self.getPriority(value), self.getId(value), value) - - def getPriority(self, value): - return value - - def getId(self, value): - return id(value) - - def indexSlow(self, value): - for pos, item in enumerate(self.items): - if item[2] == value: - return pos - return None - - def index(self, value): - item = (self.getPriority(value), self.getId(value), value) - bisect_pos = bisect.bisect(self.items, item) - 1 - if bisect_pos >= 0 and self.items[bisect_pos][2] == value: - return bisect_pos - - # Item probably changed since added, switch to slow iteration - pos = self.indexSlow(value) - - if self.logging: - print("Slow index for %s in pos %s bisect: %s" % (item[2], pos, bisect_pos)) - - if pos is None: - raise ValueError("%r not in list" % value) - else: - return pos - - def __contains__(self, value): - try: - self.index(value) - return True - except ValueError: - return False - - -class WorkerTaskManager(CustomSortedList): - def __init__(self): - super().__init__() - self.inner_paths = {} - - def getPriority(self, value): - return 0 - 
(value["priority"] - value["workers_num"] * 10) - - def getId(self, value): - return value["id"] - - def __contains__(self, value): - return value["inner_path"] in self.inner_paths - - def __delitem__(self, index): - # Remove from inner path cache - del self.inner_paths[self.items[index][2]["inner_path"]] - super().__delitem__(index) - - # Fast task search by inner_path - - def append(self, task): - if task["inner_path"] in self.inner_paths: - raise ValueError("File %s already has a task" % task["inner_path"]) - super().append(task) - # Create inner path cache for faster lookup by filename - self.inner_paths[task["inner_path"]] = task - - def remove(self, task): - if task not in self: - raise ValueError("%r not in list" % task) - else: - super().remove(task) - - def findTask(self, inner_path): - return self.inner_paths.get(inner_path, None) diff --git a/src/Worker/__init__.py b/src/Worker/__init__.py index f4d20a96..26649852 100644 --- a/src/Worker/__init__.py +++ b/src/Worker/__init__.py @@ -1,2 +1,2 @@ -from .Worker import Worker -from .WorkerManager import WorkerManager +from Worker import Worker +from WorkerManager import WorkerManager \ No newline at end of file diff --git a/src/lib/BitcoinECC/BitcoinECC.py b/src/lib/BitcoinECC/BitcoinECC.py new file mode 100644 index 00000000..6f481132 --- /dev/null +++ b/src/lib/BitcoinECC/BitcoinECC.py @@ -0,0 +1,467 @@ +# By: HurlSly +# Source: https://github.com/HurlSly/Python/blob/master/BitcoinECC.py +# Modified: random number generator in def GeneratePrivateKey(self): + +import random +import hashlib +import os + +class GaussInt: + #A class for the Gauss integers of the form a + b sqrt(n) where a,b are integers. + #n can be positive or negative. + def __init__(self,x,y,n,p=0): + if p: + self.x=x%p + self.y=y%p + self.n=n%p + else: + self.x=x + self.y=y + self.n=n + + self.p=p + + def __add__(self,b): + return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p) + + def __sub__(self,b): + return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p) + + def __mul__(self,b): + return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p) + + def __div__(self,b): + return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p) + + def __eq__(self,b): + return self.x==b.x and self.y==b.y + + def __repr__(self): + if self.p: + return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p) + else: + return "%s+%s (%d)"%(self.x,self.y,self.n) + + def __pow__(self,n): + b=Base(n,2) + t=GaussInt(1,0,self.n) + while b: + t=t*t + if b.pop(): + t=self*t + + return t + + def Inv(self): + return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p) + +def Cipolla(a,p): + #Find a square root of a modulo p using the algorithm of Cipolla + b=0 + while pow((b*b-a)%p,(p-1)/2,p)==1: + b+=1 + + return (GaussInt(b,1,b**2-a,p)**((p+1)/2)).x + +def Base(n,b): + #Decompose n in base b + l=[] + while n: + l.append(n%b) + n/=b + + return l + +def InvMod(a,n): + #Find the inverse mod n of a. + #Use the Extended Euclides Algorithm. 
+ m=[] + + s=n + while n: + m.append(a/n) + (a,n)=(n,a%n) + + u=1 + v=0 + while m: + (u,v)=(v,u-m.pop()*v) + + return u%s + +def b58encode(v): + #Encode a byte string to the Base58 + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=256 + val+=ord(c) + + result="" + while val: + (val,mod)=divmod(val,base) + result=digit[mod]+result + + pad=0 + for c in v: + if c=="\0": + pad+=1 + else: + break + + return (digit[0]*pad)+result + +def b58decode(v): + #Decode a Base58 string to byte string + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=base + val+=digit.find(c) + + result="" + while val: + (val,mod)=divmod(val,256) + result=chr(mod)+result + + pad=0 + for c in v: + if c==digit[0]: + pad+=1 + else: + break + + result="\0"*pad+result + + return result + +def Byte2Hex(b): + #Convert a byte string to hex number + out="" + for x in b: + y=hex(ord(x))[2:] + if len(y)==1: + y="0"+y + out+="%2s"%y + + return out + +def Int2Byte(n,b): + #Convert an integer to a byte string of length b + out="" + + for i in range(b): + (n,m)=divmod(n,256) + out=chr(m)+out + + return out + +class EllipticCurvePoint: + #Main class + #It is a point on an Elliptic Curve + + def __init__(self,x,a,b,p,n=0): + #We store the coordinate in x and the elliptic curve parameter. + #x is of length 3. This is the 3 projective coordinates of the point. + self.x=x[:] + self.a=a + self.b=b + self.p=p + self.n=n + + def EqualProj(self,y): + #Does y equal self ? + #It computes self cross product with y and checks if the result is 0. + return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2] + + def __add__(self,y): + #The main function to add self and y + #It uses the formulas I derived in projective coordinates. + #Projective coordinates are more performant than the usual (x,y) coordinates + #because we don't need to compute inverse mod p, which is faster. + z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p) + + if self.EqualProj(y): + d=(2*self.x[1]*self.x[2])%self.p + d3=pow(d,3,self.p) + n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p + + z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p + z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p + z.x[2]=(self.x[2]*d3)%self.p + else: + d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p + d3=pow(d,3,self.p) + n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p + + z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p + z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p + z.x[2]=(self.x[2]*d3*y.x[2])%self.p + + return z + + def __mul__(self,n): + #The fast multiplication of the point by itself n times. + b=Base(n,2) + t=EllipticCurvePoint(self.x,self.a,self.b,self.p) + b.pop() + while b: + t+=t + if b.pop(): + t+=self + + return t + + def __repr__(self): + #print a point in (x,y) coordinates. + return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p) + + def __eq__(self,x): + #Does self==x ? + return self.x==x.x and self.a==x.a and self.b==x.b and self.p==x.p + + def __ne__(self,x): + #Does self!=x ? + return self.x!=x.x or self.a!=x.a or self.b!=x.b or self.p!=x.p + + def Check(self): + #Is self on the curve ?
+ return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0 + + def GeneratePrivateKey(self): + #Generate a private key. It's just a random number between 1 and n-1. + #Of course, this function isn't cryptographically secure. + #Don't use it to generate your key. Use a cryptographically secure source of randomness instead. + #self.d = random.randint(1,self.n-1) + self.d = random.SystemRandom().randint(1,self.n-1) # Better random fix + + def SignECDSA(self,m): + #Sign a message. The private key is self.d. + h=hashlib.new("SHA256") + h.update(m) + z=int(h.hexdigest(),16) + + r=0 + s=0 + while not r or not s: + #k=random.randint(1,self.n-1) + k=random.SystemRandom().randint(1,self.n-1) # Better random fix + R=self*k + R.Normalize() + r=R.x[0]%self.n + s=(InvMod(k,self.n)*(z+r*self.d))%self.n + + return (r,s) + + def CheckECDSA(self,sig,m): + #Check a signature (r,s) of the message m using the public key self.Q + # and the generator which is self. + #This is not the one used by Bitcoin because the public key isn't known; + # only a hash of the public key is known. See the next function. + (r,s)=sig + + h=hashlib.new("SHA256") + h.update(m) + z=int(h.hexdigest(),16) + + if self.Q.x[2]==0: + return False + if not self.Q.Check(): + return False + if (self.Q*self.n).x[2]!=0: + return False + if r<1 or r>self.n-1 or s<1 or s>self.n-1: + return False + + w=InvMod(s,self.n) + u1=(z*w)%self.n + u2=(r*w)%self.n + R=self*u1+self.Q*u2 + R.Normalize() + + return (R.x[0]-r)%self.n==0 + + def VerifyMessageFromBitcoinAddress(self,adresse,m,sig): + #Check a signature (r,s) for the message m signed by the Bitcoin + # address "adresse". + h=hashlib.new("SHA256") + h.update(m) + z=int(h.hexdigest(),16) + + (r,s)=sig + x=r + y2=(pow(x,3,self.p)+self.a*x+self.b)%self.p + y=Cipolla(y2,self.p) + + for i in range(2): + kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n) + mzG=self*((-z)%self.n) + self.Q=(kG*s+mzG)*InvMod(r,self.n) + + adr=self.BitcoinAddresFromPublicKey() + if adr==adresse: + break + y=(-y)%self.p + + if adr!=adresse: + return False + + return True + + def BitcoinAddressFromPrivate(self,pri=None): + #Transform a private key in base58 encoding to a bitcoin address. + #normal means "uncompressed". + if not pri: + print "Private Key :", + pri=raw_input() + + normal=(len(pri)==51) + pri=b58decode(pri) + + if normal: + pri=pri[1:-4] + else: + pri=pri[1:-5] + + self.d=int(Byte2Hex(pri),16) + + return self.BitcoinAddress(normal) + + def PrivateEncoding(self,normal=True): + #Encode a private key self.d to base58 encoding. + p=Int2Byte(self.d,32) + p="\x80"+p + + if not normal: + p+=chr(1) + + h=hashlib.new("SHA256") + h.update(p) + s=h.digest() + + h=hashlib.new("SHA256") + h.update(s) + s=h.digest() + + cs=s[:4] + + p+=cs + p=b58encode(p) + + return p + + def BitcoinAddresFromPublicKey(self,normal=True): + #Find the bitcoin address from the public key self.Q + #We do normalization to go from the projective coordinates to the usual + # (x,y) coordinates.
+ self.Q.Normalize() + if normal: + pk=chr(4)+Int2Byte(self.Q.x[0],32)+Int2Byte((self.Q.x[1])%self.p,32) + else: + if self.Q.x[1]%2==0: + pk=chr(2)+Int2Byte(self.Q.x[0],32) + else: + pk=chr(3)+Int2Byte(self.Q.x[0],32) + + version=chr(0) + + h=hashlib.new("SHA256") + h.update(pk) + s=h.digest() + + h=hashlib.new("RIPEMD160") + h.update(s) + kh=version+h.digest() + + h=hashlib.new("SHA256") + h.update(kh) + cs=h.digest() + + h=hashlib.new("SHA256") + h.update(cs) + cs=h.digest()[:4] + + adr=b58encode(kh+cs) + + return adr + + def BitcoinAddress(self,normal=True): + #Computes a bitcoin address given the private key self.d. + self.Q=self*self.d + + return self.BitcoinAddresFromPublicKey(normal) + + def BitcoinAddressGenerator(self,k,filename): + #Generate Bitcoin addresses and write them in the filename in the multibit format. + #Change the date as you like. + f=open(filename,"w") + for i in range(k): + self.GeneratePrivateKey() + adr=self.BitcoinAddress() + p=self.PrivateEncoding() + f.write("#%s\n%s 2014-01-30T12:00:00Z\n"%(adr,p)) + + #print hex(self.d) + print adr,p + + f.close() + + def TestSign(self): + #Test signature + self.GeneratePrivateKey() + self.Q=self*self.d + m="Hello World" + adresse=self.BitcoinAddresFromPublicKey() + (r,s)=self.SignECDSA(m) + + m="Hello World" + print self.VerifyMessageFromBitcoinAddress(adresse,m,(r,s)) + + def Normalize(self): + #Transform projective coordinates of self to the usual (x,y) coordinates. + if self.x[2]: + self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p + self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p + self.x[2]=1 + elif self.x[1]: + self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p + self.x[1]=1 + elif self.x[0]: + self.x[0]=1 + else: + raise Exception + +def Bitcoin(): + #Create the Bitcoin elliptic curve + a=0 + b=7 + p=2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 + + #Create the generator G of the Bitcoin elliptic curve, with its order n. + Gx=int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16) + Gy=int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16) + n =int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16) + + #Create the generator + return EllipticCurvePoint([Gx,Gy,1],a,b,p,n) + + +if __name__ == "__main__": + bitcoin=Bitcoin() + + #Generate the public key from the private one + print bitcoin.BitcoinAddressFromPrivate("23DKRBLkeDbcSaddsMYLAHXhanPmGwkWAhSPVGbspAkc72Hw9BdrDF") + print bitcoin.BitcoinAddress() + + #Print the bitcoin address of the public key generated at the previous line + adr=bitcoin.BitcoinAddresFromPublicKey() + print adr + + #Sign a message with the current address + m="Hello World" + sig=bitcoin.SignECDSA("Hello World") + #Verify the message using only the bitcoin address, the signature and the message. + #Not using the public key as it is not needed.
+ print bitcoin.VerifyMessageFromBitcoinAddress(adr,m,sig) diff --git a/src/lib/BitcoinECC/__init__.py b/src/lib/BitcoinECC/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/BitcoinECC/newBitcoinECC.py b/src/lib/BitcoinECC/newBitcoinECC.py new file mode 100644 index 00000000..b09386bc --- /dev/null +++ b/src/lib/BitcoinECC/newBitcoinECC.py @@ -0,0 +1,460 @@ +import random +import hashlib +import base64 + +class GaussInt: + def __init__(self,x,y,n,p=0): + if p: + self.x=x%p + self.y=y%p + self.n=n%p + else: + self.x=x + self.y=y + self.n=n + + self.p=p + + def __add__(self,b): + return GaussInt(self.x+b.x,self.y+b.y,self.n,self.p) + + def __sub__(self,b): + return GaussInt(self.x-b.x,self.y-b.y,self.n,self.p) + + def __mul__(self,b): + return GaussInt(self.x*b.x+self.n*self.y*b.y,self.x*b.y+self.y*b.x,self.n,self.p) + + def __div__(self,b): + return GaussInt((self.x*b.x-self.n*self.y*b.y)/(b.x*b.x-self.n*b.y*b.y),(-self.x*b.y+self.y*b.x)/(b.x*b.x-self.n*b.y*b.y),self.n,self.p) + + def __eq__(self,b): + return self.x==b.x and self.y==b.y + + def __repr__(self): + if self.p: + return "%s+%s (%d,%d)"%(self.x,self.y,self.n,self.p) + else: + return "%s+%s (%d)"%(self.x,self.y,self.n) + + def __pow__(self,n): + b=Base(n,2) + t=GaussInt(1,0,self.n) + while b: + t=t*t + if b.pop(): + t=self*t + + return t + + def Inv(self): + return GaussInt(self.x/(self.x*self.x-self.n*self.y*self.y),-self.y/(self.x*self.x-self.n*self.y*self.y),self.n,self.p) + + def Eval(self): + return self.x.Eval()+self.y.Eval()*math.sqrt(self.n) + +def Cipolla(a,p): + b=0 + while pow((b*b-a)%p,(p-1)/2,p)==1: + b+=1 + + return (GaussInt(b,1,b**2-a,p)**((p+1)/2)).x + +def InvMod(a,n): + m=[] + + s=n + while n: + m.append(a/n) + (a,n)=(n,a%n) + + u=1 + v=0 + while m: + (u,v)=(v,u-m.pop()*v) + + return u%s + +def Base(n,b): + l=[] + while n: + l.append(n%b) + n/=b + + return l + +def MsgMagic(message): + return "\x18Bitcoin Signed Message:\n"+chr(len(message))+message + +def Hash(m,method): + h=hashlib.new(method) + h.update(m) + + return h.digest() + +def b58encode(v): + #Encode a byte string to the Base58 + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=256 + val+=ord(c) + + result="" + while val: + (val,mod)=divmod(val,base) + result=digit[mod]+result + + pad=0 + for c in v: + if c=="\x00": + pad+=1 + else: + break + + return (digit[0]*pad)+result + +def b58decode(v): + #Decode a Base58 string to byte string + digit="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" + base=len(digit) + val=0 + for c in v: + val*=base + val+=digit.find(c) + + result="" + while val: + (val,mod)=divmod(val,256) + result=chr(mod)+result + + pad=0 + for c in v: + if c==digit[0]: + pad+=1 + else: + break + + return "\x00"*pad+result + +def Byte2Int(b): + n=0 + for x in b: + n*=256 + n+=ord(x) + + return n + +def Byte2Hex(b): + #Convert a byte string to hex number + out="" + for x in b: + y=hex(ord(x))[2:] + if len(y)==1: + y="0"+y + out+="%2s"%y + + return out + +def Int2Byte(n,b): + #Convert a integer to a byte string of length b + out="" + + for _ in range(b): + (n,m)=divmod(n,256) + out=chr(m)+out + + return out + +class EllipticCurvePoint: + #Main class + #It's a point on an Elliptic Curve + + def __init__(self,x,a,b,p,n=0): + #We store the coordinate in x and the elliptic curve parameter. + #x is of length 3. This is the 3 projective coordinates of the point. 
+ self.x=x[:] + self.a=a + self.b=b + self.p=p + self.n=n + + def __add__(self,y): + #The main function to add self and y + #It uses the formulas I derived in projective coordinates. + #Projectives coordinates are more efficient than the usual (x,y) coordinates + #because we don't need to compute inverse mod p, which is faster. + z=EllipticCurvePoint([0,0,0],self.a,self.b,self.p) + + if self==y: + d=(2*self.x[1]*self.x[2])%self.p + d3=pow(d,3,self.p) + n=(3*pow(self.x[0],2,self.p)+self.a*pow(self.x[2],2,self.p))%self.p + + z.x[0]=(pow(n,2,self.p)*d*self.x[2]-2*d3*self.x[0])%self.p + z.x[1]=(3*self.x[0]*n*pow(d,2,self.p)-pow(n,3,self.p)*self.x[2]-self.x[1]*d3)%self.p + z.x[2]=(self.x[2]*d3)%self.p + else: + d=(y.x[0]*self.x[2]-y.x[2]*self.x[0])%self.p + d3=pow(d,3,self.p) + n=(y.x[1]*self.x[2]-self.x[1]*y.x[2])%self.p + + z.x[0]=(y.x[2]*self.x[2]*pow(n,2,self.p)*d-d3*(y.x[2]*self.x[0]+y.x[0]*self.x[2]))%self.p + z.x[1]=(pow(d,2,self.p)*n*(2*self.x[0]*y.x[2]+y.x[0]*self.x[2])-pow(n,3,self.p)*self.x[2]*y.x[2]-self.x[1]*d3*y.x[2])%self.p + z.x[2]=(self.x[2]*d3*y.x[2])%self.p + + return z + + def __mul__(self,n): + #The fast multiplication of point n times by itself. + b=Base(n,2) + t=EllipticCurvePoint(self.x,self.a,self.b,self.p) + b.pop() + while b: + t+=t + if b.pop(): + t+=self + + return t + + def __repr__(self): + #print a point in (x,y) coordinate. + return "x=%d\ny=%d\n"%((self.x[0]*InvMod(self.x[2],self.p))%self.p,(self.x[1]*InvMod(self.x[2],self.p))%self.p) + + def __eq__(self,y): + #Does self==y ? + #It computes self cross product with x and check if the result is 0. + return self.x[0]*y.x[1]==self.x[1]*y.x[0] and self.x[1]*y.x[2]==self.x[2]*y.x[1] and self.x[2]*y.x[0]==self.x[0]*y.x[2] and self.a==y.a and self.b==y.b and self.p==y.p + + def __ne__(self,y): + #Does self!=x ? + return not (self == y) + + def Normalize(self): + #Transform projective coordinates of self to the usual (x,y) coordinates. + if self.x[2]: + self.x[0]=(self.x[0]*InvMod(self.x[2],self.p))%self.p + self.x[1]=(self.x[1]*InvMod(self.x[2],self.p))%self.p + self.x[2]=1 + elif self.x[1]: + self.x[0]=(self.x[0]*InvMod(self.x[1],self.p))%self.p + self.x[1]=1 + elif self.x[0]: + self.x[0]=1 + else: + raise Exception + + def Check(self): + #Is self on the curve ? + return (self.x[0]**3+self.a*self.x[0]*self.x[2]**2+self.b*self.x[2]**3-self.x[1]**2*self.x[2])%self.p==0 + + + def CryptAddr(self,filename,password,Address): + txt="" + for tag in Address: + (addr,priv)=Address[tag] + if priv: + txt+="%s\t%s\t%s\n"%(tag,addr,priv) + else: + txt+="%s\t%s\t\n"%(tag,addr) + + txt+="\x00"*(15-(len(txt)-1)%16) + + password+="\x00"*(15-(len(password)-1)%16) + crypt=twofish.Twofish(password).encrypt(txt) + + f=open(filename,"wb") + f.write(crypt) + f.close() + + def GenerateD(self): + #Generate a private key. It's just a random number between 1 and n-1. + #Of course, this function isn't cryptographically secure. + #Don't use it to generate your key. Use a cryptographically secure source of randomness instead. + #return random.randint(1,self.n-1) + return random.SystemRandom().randint(1,self.n-1) # Better random fix + + def CheckECDSA(self,sig,message,Q): + #Check a signature (r,s) of the message m using the public key self.Q + # and the generator which is self. + #This is not the one used by Bitcoin because the public key isn't known; + # only a hash of the public key is known. See the function VerifyMessageFromAddress. 
+ (r,s)=sig + + if Q.x[2]==0: + return False + if not Q.Check(): + return False + if (Q*self.n).x[2]!=0: + return False + if r<1 or r>self.n-1 or s<1 or s>self.n-1: + return False + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + w=InvMod(s,self.n) + u1=(z*w)%self.n + u2=(r*w)%self.n + R=self*u1+Q*u2 + R.Normalize() + + return (R.x[0]-r)%self.n==0 + + def SignMessage(self,message,priv): + #Sign a message. The private key is self.d. + (d,uncompressed)=self.DFromPriv(priv) + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + r=0 + s=0 + while not r or not s: + #k=random.randint(1,self.n-1) + k=random.SystemRandom().randint(1,self.n-1) # Better random fix + R=self*k + R.Normalize() + r=R.x[0]%self.n + s=(InvMod(k,self.n)*(z+r*d))%self.n + + val=27 + if not uncompressed: + val+=4 + + return base64.standard_b64encode(chr(val)+Int2Byte(r,32)+Int2Byte(s,32)) + + def VerifyMessageFromAddress(self,addr,message,sig): + #Check a signature (r,s) for the message m signed by the Bitcoin + # address "addr". + + sign=base64.standard_b64decode(sig) + (r,s)=(Byte2Int(sign[1:33]),Byte2Int(sign[33:65])) + + z=Byte2Int(Hash(Hash(MsgMagic(message),"SHA256"),"SHA256")) + + val=ord(sign[0]) + if val<27 or val>=35: + return False + + if val>=31: + uncompressed=False + val-=4 + else: + uncompressed=True + + x=r + y2=(pow(x,3,self.p) + self.a*x + self.b) % self.p + y=Cipolla(y2,self.p) + + for _ in range(2): + kG=EllipticCurvePoint([x,y,1],self.a,self.b,self.p,self.n) + mzG=self*((-z)%self.n) + Q=(kG*s+mzG)*InvMod(r,self.n) + + if self.AddressFromPublicKey(Q,uncompressed)==addr: + return True + + y=self.p-y + + return False + + def AddressFromPrivate(self,priv): + #Transform a private key to a bitcoin address. + (d,uncompressed)=self.DFromPriv(priv) + + return self.AddressFromD(d,uncompressed) + + def PrivFromD(self,d,uncompressed): + #Encode a private key self.d to base58 encoding. + p=Int2Byte(d,32) + p="\x80"+p + + if not uncompressed: + p+=chr(1) + + cs=Hash(Hash(p,"SHA256"),"SHA256")[:4] + + return b58encode(p+cs) + + def DFromPriv(self,priv): + uncompressed=(len(priv)==51) + priv=b58decode(priv) + + if uncompressed: + priv=priv[:-4] + else: + priv=priv[:-5] + + return (Byte2Int(priv[1:]),uncompressed) + + def AddressFromPublicKey(self,Q,uncompressed): + #Find the bitcoin address from the public key self.Q + #We do normalization to go from the projective coordinates to the usual + # (x,y) coordinates. + Q.Normalize() + if uncompressed: + pk=chr(4)+Int2Byte(Q.x[0],32)+Int2Byte(Q.x[1],32) + else: + pk=chr(2+Q.x[1]%2)+Int2Byte(Q.x[0],32) + + kh=chr(0)+Hash(Hash(pk,"SHA256"),"RIPEMD160") + cs=Hash(Hash(kh,"SHA256"),"SHA256")[:4] + + return b58encode(kh+cs) + + def AddressFromD(self,d,uncompressed): + #Computes a bitcoin address given the private key self.d. + return self.AddressFromPublicKey(self*d,uncompressed) + + def IsValid(self,addr): + adr=b58decode(addr) + kh=adr[:-4] + cs=adr[-4:] + + verif=Hash(Hash(kh,"SHA256"),"SHA256")[:4] + + return cs==verif + + def AddressGenerator(self,k,uncompressed=True): + #Generate Bitcoin address and write them in the multibit format. + #Change the date as you like. 
+ liste={} + for i in range(k): + d=self.GenerateD() + addr=self.AddressFromD(d,uncompressed) + priv=self.PrivFromD(d,uncompressed) + liste[i]=[addr,priv] + print "%s %s"%(addr, priv) + + return liste + +def Bitcoin(): + a=0 + b=7 + p=2**256-2**32-2**9-2**8-2**7-2**6-2**4-1 + Gx=int("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798",16) + Gy=int("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8",16) + n=int("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141",16) + + return EllipticCurvePoint([Gx,Gy,1],a,b,p,n) + +def main(): + bitcoin=Bitcoin() + + #Generate an address from the private key + privkey = "PrivatekeyinBase58" + adr = bitcoin.AddressFromPrivate(privkey) + print "Address : ", adr + + #Sign a message with the current address + m="Hello World" + sig=bitcoin.SignMessage("Hello World", privkey) + #Verify the message using only the bitcoin address, the signature and the message. + #Not using the public key as it is not needed. + if bitcoin.VerifyMessageFromAddress(adr,m,sig): + print "Message verified" + + #Generate some addresses + print "Here are some addresses and associated private keys" + bitcoin.AddressGenerator(10) + +if __name__ == "__main__": main() diff --git a/src/lib/Ed25519.py b/src/lib/Ed25519.py deleted file mode 100644 index 20bdc1a9..00000000 --- a/src/lib/Ed25519.py +++ /dev/null @@ -1,340 +0,0 @@ -## ZeroNet onion V3 support -## The following copied code is copied from stem.util.ed25519 official Tor Project python3 lib -## url : https://gitweb.torproject.org/stem.git/tree/stem/util/ed25519.py -## the ##modified tag means that the function has been modified respect to the one used by stem lib -## the ##custom tag means that the function has been added by me and it's not present on the stem ed25519.py file -## every comment i make begins with ## -## -# The following is copied from... -# -# https://github.com/pyca/ed25519 -# -# This is under the CC0 license. For more information please see... -# -# https://github.com/pyca/cryptography/issues/5068 - -# ed25519.py - Optimized version of the reference implementation of Ed25519 -# -# Written in 2011? by Daniel J. Bernstein -# 2013 by Donald Stufft -# 2013 by Alex Gaynor -# 2013 by Greg Price -# -# To the extent possible under law, the author(s) have dedicated all copyright -# and related and neighboring rights to this software to the public domain -# worldwide. This software is distributed without any warranty. -# -# You should have received a copy of the CC0 Public Domain Dedication along -# with this software. If not, see -# . - -""" -NB: This code is not safe for use with secret keys or secret data. -The only safe use of this code is for verifying signatures on public messages. - -Functions for computing the public key of a secret key and for signing -a message are included, namely publickey_unsafe and signature_unsafe, -for testing purposes only. - -The root of the problem is that Python's long-integer arithmetic is -not designed for use in cryptography. Specifically, it may take more -or less time to execute an operation depending on the values of the -inputs, and its memory access patterns may also depend on the inputs. -This opens it to timing and cache side-channel attacks which can -disclose data to an attacker. We rely on Python's long-integer -arithmetic, so we cannot handle secrets without risking their disclosure. -""" - -import hashlib -import operator -import sys -import base64 - - -__version__ = "1.0.dev0" - - -# Useful for very coarse version differentiation.
-PY3 = sys.version_info[0] == 3 - -if PY3: - indexbytes = operator.getitem - intlist2bytes = bytes - int2byte = operator.methodcaller("to_bytes", 1, "big") -else: - int2byte = chr - range = list(range(1,10000000)) - - def indexbytes(buf, i): - return ord(buf[i]) - - def intlist2bytes(l): - return b"".join(chr(c) for c in l) - - -b = 256 -q = 2 ** 255 - 19 -l = 2 ** 252 + 27742317777372353535851937790883648493 - - -def H(m): - return hashlib.sha512(m).digest() - - -def pow2(x, p): - """== pow(x, 2**p, q)""" - while p > 0: - x = x * x % q - p -= 1 - return x - - -def inv(z): - """$= z^{-1} \mod q$, for z != 0""" - # Adapted from curve25519_athlon.c in djb's Curve25519. - z2 = z * z % q # 2 - z9 = pow2(z2, 2) * z % q # 9 - z11 = z9 * z2 % q # 11 - z2_5_0 = (z11 * z11) % q * z9 % q # 31 == 2^5 - 2^0 - z2_10_0 = pow2(z2_5_0, 5) * z2_5_0 % q # 2^10 - 2^0 - z2_20_0 = pow2(z2_10_0, 10) * z2_10_0 % q # ... - z2_40_0 = pow2(z2_20_0, 20) * z2_20_0 % q - z2_50_0 = pow2(z2_40_0, 10) * z2_10_0 % q - z2_100_0 = pow2(z2_50_0, 50) * z2_50_0 % q - z2_200_0 = pow2(z2_100_0, 100) * z2_100_0 % q - z2_250_0 = pow2(z2_200_0, 50) * z2_50_0 % q # 2^250 - 2^0 - return pow2(z2_250_0, 5) * z11 % q # 2^255 - 2^5 + 11 = q - 2 - - -d = -121665 * inv(121666) % q -I = pow(2, (q - 1) // 4, q) - - -def xrecover(y): - xx = (y * y - 1) * inv(d * y * y + 1) - x = pow(xx, (q + 3) // 8, q) - - if (x * x - xx) % q != 0: - x = (x * I) % q - - if x % 2 != 0: - x = q-x - - return x - - -By = 4 * inv(5) -Bx = xrecover(By) -B = (Bx % q, By % q, 1, (Bx * By) % q) -ident = (0, 1, 1, 0) - - -def edwards_add(P, Q): - # This is formula sequence 'addition-add-2008-hwcd-3' from - # http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html - (x1, y1, z1, t1) = P - (x2, y2, z2, t2) = Q - - a = (y1-x1)*(y2-x2) % q - b = (y1+x1)*(y2+x2) % q - c = t1*2*d*t2 % q - dd = z1*2*z2 % q - e = b - a - f = dd - c - g = dd + c - h = b + a - x3 = e*f - y3 = g*h - t3 = e*h - z3 = f*g - - return (x3 % q, y3 % q, z3 % q, t3 % q) - - -def edwards_double(P): - # This is formula sequence 'dbl-2008-hwcd' from - # http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html - (x1, y1, z1, t1) = P - - a = x1*x1 % q - b = y1*y1 % q - c = 2*z1*z1 % q - # dd = -a - e = ((x1+y1)*(x1+y1) - a - b) % q - g = -a + b # dd + b - f = g - c - h = -a - b # dd - b - x3 = e*f - y3 = g*h - t3 = e*h - z3 = f*g - - return (x3 % q, y3 % q, z3 % q, t3 % q) - - -def scalarmult(P, e): - if e == 0: - return ident - Q = scalarmult(P, e // 2) - Q = edwards_double(Q) - if e & 1: - Q = edwards_add(Q, P) - return Q - - -# Bpow[i] == scalarmult(B, 2**i) -Bpow = [] - - -def make_Bpow(): - P = B - for i in range(253): - Bpow.append(P) - P = edwards_double(P) -make_Bpow() - - -def scalarmult_B(e): - """ - Implements scalarmult(B, e) more efficiently. 
- """ - # scalarmult(B, l) is the identity - e = e % l - P = ident - for i in range(253): - if e & 1: - P = edwards_add(P, Bpow[i]) - e = e // 2 - assert e == 0, e - return P - - -def encodeint(y): - bits = [(y >> i) & 1 for i in range(b)] - return b''.join([ - int2byte(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b//8) - ]) - - -def encodepoint(P): - (x, y, z, t) = P - zi = inv(z) - x = (x * zi) % q - y = (y * zi) % q - bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1] - return b''.join([ - int2byte(sum([bits[i * 8 + j] << j for j in range(8)])) - for i in range(b // 8) - ]) - - -def bit(h, i): - return (indexbytes(h, i // 8) >> (i % 8)) & 1 - -##modified -def publickey_unsafe(sk): - """ - Not safe to use with secret keys or secret data. - - See module docstring. This function should be used for testing only. - """ - ##h = H(sk) - h = sk - a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) - A = scalarmult_B(a) - return encodepoint(A) - -##custom -## from stem.util.str_tools._to_unicode_impl -## from https://gitweb.torproject.org/stem.git/tree/stem/util/str_tools.py#n80 -def to_unicode_impl(msg): - if msg is not None and not isinstance(msg, str): - return msg.decode('utf-8', 'replace') - else: - return msg - -##custom -## rewritten stem.descriptor.hidden_service.address_from_identity_key -## from https://gitweb.torproject.org/stem.git/tree/stem/descriptor/hidden_service.py#n1088 -def publickey_to_onionaddress(key): - CHECKSUM_CONSTANT = b'.onion checksum' - ## version = stem.client.datatype.Size.CHAR.pack(3) - version = b'\x03' - checksum = hashlib.sha3_256(CHECKSUM_CONSTANT + key + version).digest()[:2] - onion_address = base64.b32encode(key + checksum + version) - return to_unicode_impl(onion_address + b'.onion').lower() - - -def Hint(m): - h = H(m) - return sum(2 ** i * bit(h, i) for i in range(2 * b)) - -##modified -def signature_unsafe(m, sk, pk): - """ - Not safe to use with secret keys or secret data. - - See module docstring. This function should be used for testing only. - """ - ##h = H(sk) - h = sk - a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2)) - r = Hint( - intlist2bytes([indexbytes(h, j) for j in range(b // 8, b // 4)]) + m - ) - R = scalarmult_B(r) - S = (r + Hint(encodepoint(R) + pk + m) * a) % l - return encodepoint(R) + encodeint(S) - - -def isoncurve(P): - (x, y, z, t) = P - return (z % q != 0 and - x*y % q == z*t % q and - (y*y - x*x - z*z - d*t*t) % q == 0) - - -def decodeint(s): - return sum(2 ** i * bit(s, i) for i in range(0, b)) - - -def decodepoint(s): - y = sum(2 ** i * bit(s, i) for i in range(0, b - 1)) - x = xrecover(y) - if x & 1 != bit(s, b-1): - x = q - x - P = (x, y, 1, (x*y) % q) - if not isoncurve(P): - raise ValueError("decoding point that is not on curve") - return P - - -class SignatureMismatch(Exception): - pass - - -def checkvalid(s, m, pk): - """ - Not safe to use when any argument is secret. - - See module docstring. This function should be used only for - verifying public signatures of public messages. 
- """ - if len(s) != b // 4: - raise ValueError("signature length is wrong") - - if len(pk) != b // 8: - raise ValueError("public-key length is wrong") - - R = decodepoint(s[:b // 8]) - A = decodepoint(pk) - S = decodeint(s[b // 8:b // 4]) - h = Hint(encodepoint(R) + pk + m) - - (x1, y1, z1, t1) = P = scalarmult_B(S) - (x2, y2, z2, t2) = Q = edwards_add(R, scalarmult(A, h)) - - if (not isoncurve(P) or not isoncurve(Q) or - (x1*z2 - x2*z1) % q != 0 or (y1*z2 - y2*z1) % q != 0): - raise SignatureMismatch("signature does not pass verification") diff --git a/src/lib/PySocks/LICENSE b/src/lib/PySocks/LICENSE new file mode 100644 index 00000000..04b6b1f3 --- /dev/null +++ b/src/lib/PySocks/LICENSE @@ -0,0 +1,22 @@ +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. diff --git a/src/lib/PySocks/README.md b/src/lib/PySocks/README.md new file mode 100644 index 00000000..65419240 --- /dev/null +++ b/src/lib/PySocks/README.md @@ -0,0 +1,299 @@ +PySocks +======= + +Updated version of SocksiPy. Many old bugs fixed, and overall code cleanup. + +Acts as a drop-in replacement to the socket module. + +---------------- + +Features +======== + +* Fully supports Python 2.6 - 3.4 + +* UDP support + +* SocksiPyHandler, courtesy e000, was also added as an example of how this module can be used with urllib2. See example code in sockshandler.py. `pip install` and `setup.py install` will automatically install the `sockshandler` module. + +* Bugs in the original SocksiPy were fixed, including two that could lead to infinite hanging when communicating with bad proxy servers. + +* urllib3, which powers the requests module, is working on integrating SOCKS proxy support based on this branch + +* `SOCKS5`, `SOCKS4`, and `HTTP` are now aliases for `PROXY_TYPE_SOCKS5`, `PROXY_TYPE_SOCKS4`, and `PROXY_TYPE_HTTP` + +* Tests added + +* Various style and performance improvements; codebase simplified + +* Actively maintained + +Installation +============ + + pip install PySocks + +Or download the tarball / `git clone` and... + + python setup.py install + +These will install both the `socks` and `sockshandler` modules. + +Alternatively, include just `socks.py` in your project. 
+ +-------------------------------------------- + +*Warning:* PySocks/SocksiPy only supports HTTP proxies that use CONNECT tunneling. Certain HTTP proxies may not work with this library. If you wish to use HTTP proxies (and not SOCKS proxies), it is recommended that you rely on your HTTP client's native proxy support (`proxies` dict for `requests`, or `urllib2.ProxyHandler` for `urllib2`) instead. + +-------------------------------------------- + +Usage +===== + +## Example ## + + import socks + + s = socks.socksocket() + + s.set_proxy(socks.SOCKS5, "localhost") # SOCKS4 and SOCKS5 use port 1080 by default + # Or + s.set_proxy(socks.SOCKS4, "localhost", 4444) + # Or + s.set_proxy(socks.HTTP, "5.5.5.5", 8888) + + # Can be treated identically to a regular socket object + s.connect(("www.test.com", 80)) + s.sendall("GET / ...") + print s.recv(4096) + + +To monkeypatch the entire standard library with a single default proxy: + + import socket + import socks + import urllib2 + + socks.set_default_proxy(socks.SOCKS5, "localhost") + socket.socket = socks.socksocket + + urllib2.urlopen("http://...") # All requests will pass through the SOCKS proxy + +Note that monkeypatching may not work for all standard modules or for all third party modules, and generally isn't recommended. + +-------------------------------------------- + +Original SocksiPy README attached below, amended to reflect API changes. + +-------------------------------------------- + +SocksiPy - version 1.5.0 + +A Python SOCKS module. + +(C) 2006 Dan-Haim. All rights reserved. + +See LICENSE file for details. + + +*WHAT IS A SOCKS PROXY?* + +A SOCKS proxy is a proxy server at the TCP level. In other words, it acts as +a tunnel, relaying all traffic going through it without modifying it. +SOCKS proxies can be used to relay traffic using any network protocol that +uses TCP. + +*WHAT IS SOCKSIPY?* + +This Python module allows you to create TCP connections through a SOCKS +proxy without any special effort. +It also supports relaying UDP packets with a SOCKS5 proxy. + +*PROXY COMPATIBILITY* + +SocksiPy is compatible with three different types of proxies: + +1. SOCKS Version 4 (SOCKS4), including the SOCKS4a extension. +2. SOCKS Version 5 (SOCKS5). +3. HTTP Proxies which support tunneling using the CONNECT method. + +*SYSTEM REQUIREMENTS* + +Being written in Python, SocksiPy can run on any platform that has a Python +interpreter and TCP/IP support. +This module has been tested with Python 2.3 and should work with greater versions +just as well. + + +INSTALLATION +------------- + +Simply copy the file "socks.py" to your Python's `lib/site-packages` directory, +and you're ready to go. [Editor's note: it is better to use `python setup.py install` for PySocks] + + +USAGE +------ + +First load the socks module with the command: + + >>> import socks + >>> + +The socks module provides a class called `socksocket`, which is the basis of all of the module's functionality. + +The `socksocket` object has the same initialization parameters as the normal socket +object to ensure maximal compatibility; however, it should be noted that `socksocket` will only function with family being `AF_INET` and +type being either `SOCK_STREAM` or `SOCK_DGRAM`. +Generally, it is best to initialize the `socksocket` object with no parameters: + + >>> s = socks.socksocket() + >>> + +The `socksocket` object has an interface which is very similar to socket's (in fact +the `socksocket` class is derived from socket) with a few extra methods.
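+
+Because `socksocket` derives from the standard socket class, it can be passed
+to code that expects a plain socket. A minimal sketch, assuming a SOCKS5
+proxy listening on localhost:1080:
+
+    import socks
+
+    def fetch_head(sock, host):
+        # Works the same with a plain socket or a socksocket.
+        sock.connect((host, 80))
+        sock.sendall("HEAD / HTTP/1.0\r\nHost: %s\r\n\r\n" % host)
+        return sock.recv(4096)
+
+    s = socks.socksocket()
+    s.set_proxy(socks.SOCKS5, "localhost", 1080)
+    print(fetch_head(s, "www.example.com"))
+    s.close()
+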
+To select the proxy server you would like to use, use the `set_proxy` method, whose +syntax is: + + set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]]) + +Explanation of the parameters: + +`proxy_type` - The type of the proxy server. This can be one of three possible +choices: `PROXY_TYPE_SOCKS4`, `PROXY_TYPE_SOCKS5` and `PROXY_TYPE_HTTP` for SOCKS4, +SOCKS5 and HTTP servers respectively. `SOCKS4`, `SOCKS5`, and `HTTP` are aliases for these, respectively. + +`addr` - The IP address or DNS name of the proxy server. + +`port` - The port of the proxy server. Defaults to 1080 for SOCKS and 8080 for HTTP. + +`rdns` - This is a boolean flag that modifies the behavior regarding DNS resolving. +If it is set to True, DNS resolving will be performed remotely, on the server. +If it is set to False, DNS resolving will be performed locally. Please note that +setting this to True with SOCKS4 servers actually uses an extension to the protocol, +called SOCKS4a, which may not be supported on all servers (SOCKS5 and HTTP servers +always support DNS). The default is True. + +`username` - For SOCKS5 servers, this allows simple username / password authentication +with the server. For SOCKS4 servers, this parameter will be sent as the userid. +This parameter is ignored if an HTTP server is being used. If it is not provided, +authentication will not be used (servers may accept unauthenticated requests). + +`password` - This parameter is valid only for SOCKS5 servers and specifies the +respective password for the username provided. + +Example of usage: + + >>> s.set_proxy(socks.SOCKS5, "socks.example.com") # uses default port 1080 + >>> s.set_proxy(socks.SOCKS4, "socks.test.com", 1081) + +After the set_proxy method has been called, simply call the connect method with the +traditional parameters to establish a connection through the proxy: + + >>> s.connect(("www.sourceforge.net", 80)) + >>> + +Connection will take a bit longer to allow negotiation with the proxy server. +Please note that calling connect without calling `set_proxy` earlier will connect +without a proxy (just like a regular socket). + +Errors: Any errors in the connection process will trigger exceptions. The exception +may either be generated by the underlying socket layer or may be custom module +exceptions, whose details follow: + +class `ProxyError` - This is a base exception class. It is not raised directly but +rather all other exception classes raised by this module are derived from it. +This allows an easy way to catch all proxy-related errors. It descends from `IOError`. + +All `ProxyError` exceptions have an attribute `socket_err`, which will contain either a +caught `socket.error` exception, or `None` if there wasn't any. + +class `GeneralProxyError` - When thrown, it indicates a problem which does not fall +into another category. + +* `Sent invalid data` - This error means that unexpected data has been received from +the server. The most common reason is that the server specified as the proxy is +not really a SOCKS4/SOCKS5/HTTP proxy, or maybe the proxy type specified is wrong. + +* `Connection closed unexpectedly` - The proxy server unexpectedly closed the connection. +This may indicate that the proxy server is experiencing network or software problems. + +* `Bad proxy type` - This will be raised if the type of the proxy supplied to the +set_proxy function was not one of `SOCKS4`/`SOCKS5`/`HTTP`. + +* `Bad input` - This will be raised if the `connect()` method is called with bad input +parameters.
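+
+A minimal sketch of catching these exceptions (assuming a SOCKS5 proxy on
+localhost:1080):
+
+    import socks
+
+    s = socks.socksocket()
+    s.set_proxy(socks.SOCKS5, "localhost", 1080)
+    try:
+        s.connect(("www.example.com", 80))
+    except socks.ProxyError as err:
+        # ProxyError descends from IOError; socket_err holds the underlying
+        # socket.error, or None if the failure was at the proxy protocol level.
+        print("Proxy failure: %s (socket_err=%r)" % (err.msg, err.socket_err))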
+ +class `SOCKS5AuthError` - This indicates that the connection through a SOCKS5 server +failed due to an authentication problem. + +* `Authentication is required` - This will happen if you use a SOCKS5 server which +requires authentication without providing a username / password at all. + +* `All offered authentication methods were rejected` - This will happen if the proxy +requires a special authentication method which is not supported by this module. + +* `Unknown username or invalid password` - Self-descriptive. + +class `SOCKS5Error` - This will be raised for SOCKS5 errors which are not related to +authentication. +The parameter is a tuple containing a code, as given by the server, +and a description of the +error. The possible errors, according to the RFC, are: + +* `0x01` - General SOCKS server failure - If for any reason the proxy server is unable to +fulfill your request (internal server error). +* `0x02` - connection not allowed by ruleset - If the address you're trying to connect to +is blacklisted on the server or requires authentication. +* `0x03` - Network unreachable - The target could not be contacted. A router on the network +had replied with a destination net unreachable error. +* `0x04` - Host unreachable - The target could not be contacted. A router on the network +had replied with a destination host unreachable error. +* `0x05` - Connection refused - The target server has actively refused the connection +(the requested port is closed). +* `0x06` - TTL expired - The TTL value of the SYN packet from the proxy to the target server +has expired. This usually means that there are network problems causing the packet +to be caught in a router-to-router "ping-pong". +* `0x07` - Command not supported - For instance if the server does not support UDP. +* `0x08` - Address type not supported - The client has provided an invalid address type. +When using this module, this error should not occur. + +class `SOCKS4Error` - This will be raised for SOCKS4 errors. The parameter is a tuple +containing a code and a description of the error, as given by the server. The +possible errors, according to the specification, are: + +* `0x5B` - Request rejected or failed - Will be raised in the event of a failure for any +reason other than the two mentioned next. +* `0x5C` - request rejected because SOCKS server cannot connect to identd on the client - +The Socks server had tried an ident lookup on your computer and has failed. In this +case you should run an identd server and/or configure your firewall to allow incoming +connections to local port 113 from the remote server. +* `0x5D` - request rejected because the client program and identd report different user-ids - +The Socks server had performed an ident lookup on your computer and has received a +different userid than the one you have provided. Change your userid (through the +username parameter of the set_proxy method) to match and try again. + +class `HTTPError` - This will be raised for HTTP errors. The message will contain +the HTTP status code and provided error message. + +After establishing the connection, the object behaves like a standard socket. +Methods like `makefile()` and `settimeout()` should behave just like regular sockets. +Call the `close()` method to close the connection. + +In addition to the `socksocket` class, an additional function worth mentioning is the +`set_default_proxy` function. The parameters are the same as the `set_proxy` method.
+This function will set default proxy settings for newly created `socksocket` objects, +in which the proxy settings haven't been changed via the `set_proxy` method. +This is quite useful if you wish to force 3rd party modules to use a SOCKS proxy, +by overriding the socket object. +For example: + + >>> socks.set_default_proxy(socks.SOCKS5, "socks.example.com") + >>> socket.socket = socks.socksocket + >>> urllib.urlopen("http://www.sourceforge.net/") + + +PROBLEMS +--------- + +Please open a GitHub issue at https://github.com/Anorov/PySocks diff --git a/src/lib/PySocks/__init__.py b/src/lib/PySocks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/PySocks/setup.py b/src/lib/PySocks/setup.py new file mode 100644 index 00000000..9db0f3d2 --- /dev/null +++ b/src/lib/PySocks/setup.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +from distutils.core import setup + +VERSION = "1.5.3" + +setup( + name = "PySocks", + version = VERSION, + description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information.", + url = "https://github.com/Anorov/PySocks", + license = "BSD", + author = "Anorov", + author_email = "anorov.vorona@gmail.com", + keywords = ["socks", "proxy"], + py_modules=["socks", "sockshandler"] +) + diff --git a/src/lib/PySocks/socks.py b/src/lib/PySocks/socks.py new file mode 100644 index 00000000..ad1e9780 --- /dev/null +++ b/src/lib/PySocks/socks.py @@ -0,0 +1,699 @@ +""" +SocksiPy - Python SOCKS module. +Version 1.5.3 + +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. + + +This module provides a standard socket-like interface for Python +for tunneling connections through SOCKS proxies. 
+ +=============================================================================== + +Minor modifications made by Christopher Gilbert (http://motomastyle.com/) +for use in PyLoris (http://pyloris.sourceforge.net/) + +Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) +mainly to merge bug fixes found in Sourceforge + +Modifications made by Anorov (https://github.com/Anorov) +-Forked and renamed to PySocks +-Fixed issue with HTTP proxy failure checking (same bug that was in the old ___recvall() method) +-Included SocksiPyHandler (sockshandler.py), to be used as a urllib2 handler, + courtesy of e000 (https://github.com/e000): https://gist.github.com/869791#file_socksipyhandler.py +-Re-styled code to make it readable + -Aliased PROXY_TYPE_SOCKS5 -> SOCKS5 etc. + -Improved exception handling and output + -Removed irritating use of sequence indexes, replaced with tuple unpacked variables + -Fixed up Python 3 bytestring handling - chr(0x03).encode() -> b"\x03" + -Other general fixes +-Added clarification that the HTTP proxy connection method only supports CONNECT-style tunneling HTTP proxies +-Various small bug fixes +""" + +__version__ = "1.5.3" + +import socket +import struct +from errno import EOPNOTSUPP, EINVAL, EAGAIN +from io import BytesIO +from os import SEEK_CUR +from collections import Callable + +PROXY_TYPE_SOCKS4 = SOCKS4 = 1 +PROXY_TYPE_SOCKS5 = SOCKS5 = 2 +PROXY_TYPE_HTTP = HTTP = 3 + +PROXY_TYPES = {"SOCKS4": SOCKS4, "SOCKS5": SOCKS5, "HTTP": HTTP} +PRINTABLE_PROXY_TYPES = dict(zip(PROXY_TYPES.values(), PROXY_TYPES.keys())) + +_orgsocket = _orig_socket = socket.socket + +class ProxyError(IOError): + """ + socket_err contains original socket.error exception. + """ + def __init__(self, msg, socket_err=None): + self.msg = msg + self.socket_err = socket_err + + if socket_err: + self.msg += ": {0}".format(socket_err) + + def __str__(self): + return self.msg + +class GeneralProxyError(ProxyError): pass +class ProxyConnectionError(ProxyError): pass +class SOCKS5AuthError(ProxyError): pass +class SOCKS5Error(ProxyError): pass +class SOCKS4Error(ProxyError): pass +class HTTPError(ProxyError): pass + +SOCKS4_ERRORS = { 0x5B: "Request rejected or failed", + 0x5C: "Request rejected because SOCKS server cannot connect to identd on the client", + 0x5D: "Request rejected because the client program and identd report different user-ids" + } + +SOCKS5_ERRORS = { 0x01: "General SOCKS server failure", + 0x02: "Connection not allowed by ruleset", + 0x03: "Network unreachable", + 0x04: "Host unreachable", + 0x05: "Connection refused", + 0x06: "TTL expired", + 0x07: "Command not supported, or protocol error", + 0x08: "Address type not supported" + } + +DEFAULT_PORTS = { SOCKS4: 1080, + SOCKS5: 1080, + HTTP: 8080 + } + +def set_default_proxy(proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None): + """ + set_default_proxy(proxy_type, addr[, port[, rdns[, username, password]]]) + + Sets a default proxy which all further socksocket objects will use, + unless explicitly changed. All parameters are as for socket.set_proxy(). + """ + socksocket.default_proxy = (proxy_type, addr, port, rdns, + username.encode() if username else None, + password.encode() if password else None) + +setdefaultproxy = set_default_proxy + +def get_default_proxy(): + """ + Returns the default proxy, set by set_default_proxy. 
+ """ + return socksocket.default_proxy + +getdefaultproxy = get_default_proxy + +def wrap_module(module): + """ + Attempts to replace a module's socket library with a SOCKS socket. Must set + a default proxy using set_default_proxy(...) first. + This will only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category. + """ + if socksocket.default_proxy: + module.socket.socket = socksocket + else: + raise GeneralProxyError("No default proxy specified") + +wrapmodule = wrap_module + +def create_connection(dest_pair, proxy_type=None, proxy_addr=None, + proxy_port=None, proxy_username=None, + proxy_password=None, timeout=None, + source_address=None): + """create_connection(dest_pair, *[, timeout], **proxy_args) -> socket object + + Like socket.create_connection(), but connects to proxy + before returning the socket object. + + dest_pair - 2-tuple of (IP/hostname, port). + **proxy_args - Same args passed to socksocket.set_proxy() if present. + timeout - Optional socket timeout value, in seconds. + source_address - tuple (host, port) for the socket to bind to as its source + address before connecting (only for compatibility) + """ + sock = socksocket() + if isinstance(timeout, (int, float)): + sock.settimeout(timeout) + if proxy_type is not None: + sock.set_proxy(proxy_type, proxy_addr, proxy_port, + proxy_username, proxy_password) + sock.connect(dest_pair) + return sock + +class _BaseSocket(socket.socket): + """Allows Python 2's "delegated" methods such as send() to be overridden + """ + def __init__(self, *pos, **kw): + _orig_socket.__init__(self, *pos, **kw) + + self._savedmethods = dict() + for name in self._savenames: + self._savedmethods[name] = getattr(self, name) + delattr(self, name) # Allows normal overriding mechanism to work + + _savenames = list() + +def _makemethod(name): + return lambda self, *pos, **kw: self._savedmethods[name](*pos, **kw) +for name in ("sendto", "send", "recvfrom", "recv"): + method = getattr(_BaseSocket, name, None) + + # Determine if the method is not defined the usual way + # as a function in the class. + # Python 2 uses __slots__, so there are descriptors for each method, + # but they are not functions. + if not isinstance(method, Callable): + _BaseSocket._savenames.append(name) + setattr(_BaseSocket, name, _makemethod(name)) + +class socksocket(_BaseSocket): + """socksocket([family[, type[, proto]]]) -> socket object + + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET and proto=0. + The "type" argument must be either SOCK_STREAM or SOCK_DGRAM. + """ + + default_proxy = None + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): + if type not in (socket.SOCK_STREAM, socket.SOCK_DGRAM): + msg = "Socket type must be stream or datagram, not {!r}" + raise ValueError(msg.format(type)) + + _BaseSocket.__init__(self, family, type, proto, _sock) + self._proxyconn = None # TCP connection to keep UDP relay alive + + if self.default_proxy: + self.proxy = self.default_proxy + else: + self.proxy = (None, None, None, None, None, None) + self.proxy_sockname = None + self.proxy_peername = None + + def _readall(self, file, count): + """ + Receive EXACTLY the number of bytes requested from the file object. + Blocks until the required number of bytes have been received. 
+ """ + data = b"" + while len(data) < count: + d = file.read(count - len(data)) + if not d: + raise GeneralProxyError("Connection closed unexpectedly") + data += d + return data + + def set_proxy(self, proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None): + """set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]]) + Sets the proxy to be used. + + proxy_type - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be performed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided. + """ + self.proxy = (proxy_type, addr, port, rdns, + username.encode() if username else None, + password.encode() if password else None) + + setproxy = set_proxy + + def bind(self, *pos, **kw): + """ + Implements proxy connection for UDP sockets, + which happens during the bind() phase. + """ + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + if not proxy_type or self.type != socket.SOCK_DGRAM: + return _orig_socket.bind(self, *pos, **kw) + + if self._proxyconn: + raise socket.error(EINVAL, "Socket already bound to an address") + if proxy_type != SOCKS5: + msg = "UDP only supported by SOCKS5 proxy type" + raise socket.error(EOPNOTSUPP, msg) + _BaseSocket.bind(self, *pos, **kw) + + # Need to specify actual local port because + # some relays drop packets if a port of zero is specified. + # Avoid specifying host address in case of NAT though. 
+ _, port = self.getsockname() + dst = ("0", port) + + self._proxyconn = _orig_socket() + proxy = self._proxy_addr() + self._proxyconn.connect(proxy) + + UDP_ASSOCIATE = b"\x03" + _, relay = self._SOCKS5_request(self._proxyconn, UDP_ASSOCIATE, dst) + + # The relay is most likely on the same host as the SOCKS proxy, + # but some proxies return a private IP address (10.x.y.z) + host, _ = proxy + _, port = relay + _BaseSocket.connect(self, (host, port)) + self.proxy_sockname = ("0.0.0.0", 0) # Unknown + + def sendto(self, bytes, *args, **kwargs): + if self.type != socket.SOCK_DGRAM: + return _BaseSocket.sendto(self, bytes, *args, **kwargs) + if not self._proxyconn: + self.bind(("", 0)) + + address = args[-1] + flags = args[:-1] + + header = BytesIO() + RSV = b"\x00\x00" + header.write(RSV) + STANDALONE = b"\x00" + header.write(STANDALONE) + self._write_SOCKS5_address(address, header) + + sent = _BaseSocket.send(self, header.getvalue() + bytes, *flags, **kwargs) + return sent - header.tell() + + def send(self, bytes, flags=0, **kwargs): + if self.type == socket.SOCK_DGRAM: + return self.sendto(bytes, flags, self.proxy_peername, **kwargs) + else: + return _BaseSocket.send(self, bytes, flags, **kwargs) + + def recvfrom(self, bufsize, flags=0): + if self.type != socket.SOCK_DGRAM: + return _BaseSocket.recvfrom(self, bufsize, flags) + if not self._proxyconn: + self.bind(("", 0)) + + buf = BytesIO(_BaseSocket.recv(self, bufsize, flags)) + buf.seek(+2, SEEK_CUR) + frag = buf.read(1) + if ord(frag): + raise NotImplementedError("Received UDP packet fragment") + fromhost, fromport = self._read_SOCKS5_address(buf) + + if self.proxy_peername: + peerhost, peerport = self.proxy_peername + if fromhost != peerhost or peerport not in (0, fromport): + raise socket.error(EAGAIN, "Packet filtered") + + return (buf.read(), (fromhost, fromport)) + + def recv(self, *pos, **kw): + bytes, _ = self.recvfrom(*pos, **kw) + return bytes + + def close(self): + if self._proxyconn: + self._proxyconn.close() + return _BaseSocket.close(self) + + def get_proxy_sockname(self): + """ + Returns the bound IP address and port number at the proxy. + """ + return self.proxy_sockname + + getproxysockname = get_proxy_sockname + + def get_proxy_peername(self): + """ + Returns the IP and port number of the proxy. + """ + return _BaseSocket.getpeername(self) + + getproxypeername = get_proxy_peername + + def get_peername(self): + """ + Returns the IP address and port number of the destination + machine (note: get_proxy_peername returns the proxy) + """ + return self.proxy_peername + + getpeername = get_peername + + def _negotiate_SOCKS5(self, *dest_addr): + """ + Negotiates a stream connection through a SOCKS5 server. + """ + CONNECT = b"\x01" + self.proxy_peername, self.proxy_sockname = self._SOCKS5_request(self, + CONNECT, dest_addr) + + def _SOCKS5_request(self, conn, cmd, dst): + """ + Send SOCKS5 request with given command (CMD field) and + address (DST field). Returns resolved DST address that was used. + """ + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = conn.makefile("wb") + reader = conn.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # First we'll send the authentication packages we support. + if username and password: + # The username/password details were supplied to the + # set_proxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). 
+ writer.write(b"\x05\x02\x00\x02") + else: + # No username/password were entered, therefore we + # only support connections with no authentication. + writer.write(b"\x05\x01\x00") + + # We'll receive the server's response to determine which + # method was selected + writer.flush() + chosen_auth = self._readall(reader, 2) + + if chosen_auth[0:1] != b"\x05": + # Note: string[i:i+1] is used because indexing of a bytestring + # via bytestring[i] yields an integer in Python 3 + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + # Check the chosen authentication method + + if chosen_auth[1:2] == b"\x02": + # Okay, we need to perform a basic username/password + # authentication. + writer.write(b"\x01" + chr(len(username)).encode() + + username + + chr(len(password)).encode() + + password) + writer.flush() + auth_status = self._readall(reader, 2) + if auth_status[0:1] != b"\x01": + # Bad response + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + if auth_status[1:2] != b"\x00": + # Authentication failed + raise SOCKS5AuthError("SOCKS5 authentication failed") + + # Otherwise, authentication succeeded + + # No authentication is required if 0x00 + elif chosen_auth[1:2] != b"\x00": + # Reaching here is always bad + if chosen_auth[1:2] == b"\xFF": + raise SOCKS5AuthError("All offered SOCKS5 authentication methods were rejected") + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + # Now we can request the actual connection + writer.write(b"\x05" + cmd + b"\x00") + resolved = self._write_SOCKS5_address(dst, writer) + writer.flush() + + # Get the response + resp = self._readall(reader, 3) + if resp[0:1] != b"\x05": + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x00: + # Connection failed: server returned an error + error = SOCKS5_ERRORS.get(status, "Unknown error") + raise SOCKS5Error("{0:#04x}: {1}".format(status, error)) + + # Get the bound address/port + bnd = self._read_SOCKS5_address(reader) + return (resolved, bnd) + finally: + reader.close() + writer.close() + + def _write_SOCKS5_address(self, addr, file): + """ + Return the host and port packed for the SOCKS5 protocol, + and the resolved address as a tuple object. + """ + host, port = addr + proxy_type, _, _, rdns, username, password = self.proxy + + # If the given destination address is an IP address, we'll + # use the IPv4 address request even if remote resolving was specified. + try: + addr_bytes = socket.inet_aton(host) + file.write(b"\x01" + addr_bytes) + host = socket.inet_ntoa(addr_bytes) + except socket.error: + # Well it's not an IP number, so it's probably a DNS name. 
+ if rdns: + # Resolve remotely + host_bytes = host.encode('idna') + file.write(b"\x03" + chr(len(host_bytes)).encode() + host_bytes) + else: + # Resolve locally + addr_bytes = socket.inet_aton(socket.gethostbyname(host)) + file.write(b"\x01" + addr_bytes) + host = socket.inet_ntoa(addr_bytes) + + file.write(struct.pack(">H", port)) + return host, port + + def _read_SOCKS5_address(self, file): + atyp = self._readall(file, 1) + if atyp == b"\x01": + addr = socket.inet_ntoa(self._readall(file, 4)) + elif atyp == b"\x03": + length = self._readall(file, 1) + addr = self._readall(file, ord(length)) + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + port = struct.unpack(">H", self._readall(file, 2))[0] + return addr, port + + def _negotiate_SOCKS4(self, dest_addr, dest_port): + """ + Negotiates a connection through a SOCKS4 server. + """ + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = self.makefile("wb") + reader = self.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # Check if the destination address provided is an IP address + remote_resolve = False + try: + addr_bytes = socket.inet_aton(dest_addr) + except socket.error: + # It's a DNS name. Check where it should be resolved. + if rdns: + addr_bytes = b"\x00\x00\x00\x01" + remote_resolve = True + else: + addr_bytes = socket.inet_aton(socket.gethostbyname(dest_addr)) + + # Construct the request packet + writer.write(struct.pack(">BBH", 0x04, 0x01, dest_port)) + writer.write(addr_bytes) + + # The username parameter is considered userid for SOCKS4 + if username: + writer.write(username) + writer.write(b"\x00") + + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. + if remote_resolve: + writer.write(dest_addr.encode('idna') + b"\x00") + writer.flush() + + # Get the response from the server + resp = self._readall(reader, 8) + if resp[0:1] != b"\x00": + # Bad data + raise GeneralProxyError("SOCKS4 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x5A: + # Connection failed: server returned an error + error = SOCKS4_ERRORS.get(status, "Unknown error") + raise SOCKS4Error("{0:#04x}: {1}".format(status, error)) + + # Get the bound address/port + self.proxy_sockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0]) + if remote_resolve: + self.proxy_peername = socket.inet_ntoa(addr_bytes), dest_port + else: + self.proxy_peername = dest_addr, dest_port + finally: + reader.close() + writer.close() + + def _negotiate_HTTP(self, dest_addr, dest_port): + """ + Negotiates a connection through an HTTP server. + NOTE: This currently only supports HTTP CONNECT-style proxies. 
+ """ + proxy_type, addr, port, rdns, username, password = self.proxy + + # If we need to resolve locally, we do this now + addr = dest_addr if rdns else socket.gethostbyname(dest_addr) + + self.sendall(b"CONNECT " + addr.encode('idna') + b":" + str(dest_port).encode() + + b" HTTP/1.1\r\n" + b"Host: " + dest_addr.encode('idna') + b"\r\n\r\n") + + # We just need the first line to check if the connection was successful + fobj = self.makefile() + status_line = fobj.readline() + fobj.close() + + if not status_line: + raise GeneralProxyError("Connection closed unexpectedly") + + try: + proto, status_code, status_msg = status_line.split(" ", 2) + except ValueError: + raise GeneralProxyError("HTTP proxy server sent invalid response") + + if not proto.startswith("HTTP/"): + raise GeneralProxyError("Proxy server does not appear to be an HTTP proxy") + + try: + status_code = int(status_code) + except ValueError: + raise HTTPError("HTTP proxy server did not return a valid HTTP status") + + if status_code != 200: + error = "{0}: {1}".format(status_code, status_msg) + if status_code in (400, 403, 405): + # It's likely that the HTTP proxy server does not support the CONNECT tunneling method + error += ("\n[*] Note: The HTTP proxy server may not be supported by PySocks" + " (must be a CONNECT tunnel proxy)") + raise HTTPError(error) + + self.proxy_sockname = (b"0.0.0.0", 0) + self.proxy_peername = addr, dest_port + + _proxy_negotiators = { + SOCKS4: _negotiate_SOCKS4, + SOCKS5: _negotiate_SOCKS5, + HTTP: _negotiate_HTTP + } + + + def connect(self, dest_pair): + """ + Connects to the specified destination through a proxy. + Uses the same API as socket's connect(). + To select the proxy server, use set_proxy(). + + dest_pair - 2-tuple of (IP/hostname, port). + """ + dest_addr, dest_port = dest_pair + + if self.type == socket.SOCK_DGRAM: + if not self._proxyconn: + self.bind(("", 0)) + dest_addr = socket.gethostbyname(dest_addr) + + # If the host address is INADDR_ANY or similar, reset the peer + # address so that packets are received from any peer + if dest_addr == "0.0.0.0" and not dest_port: + self.proxy_peername = None + else: + self.proxy_peername = (dest_addr, dest_port) + return + + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + + # Do a minimal input check first + if (not isinstance(dest_pair, (list, tuple)) + or len(dest_pair) != 2 + or not dest_addr + or not isinstance(dest_port, int)): + raise GeneralProxyError("Invalid destination-connection (host, port) pair") + + + if proxy_type is None: + # Treat like regular socket object + self.proxy_peername = dest_pair + _BaseSocket.connect(self, (dest_addr, dest_port)) + return + + proxy_addr = self._proxy_addr() + + try: + # Initial connection to proxy server + _BaseSocket.connect(self, proxy_addr) + + except socket.error as error: + # Error while connecting to proxy + self.close() + proxy_addr, proxy_port = proxy_addr + proxy_server = "{0}:{1}".format(proxy_addr, proxy_port) + printable_type = PRINTABLE_PROXY_TYPES[proxy_type] + + msg = "Error connecting to {0} proxy {1}".format(printable_type, + proxy_server) + raise ProxyConnectionError(msg, error) + + else: + # Connected to proxy server, now negotiate + try: + # Calls negotiate_{SOCKS4, SOCKS5, HTTP} + negotiate = self._proxy_negotiators[proxy_type] + negotiate(self, dest_addr, dest_port) + except socket.error as error: + # Wrap socket errors + self.close() + raise GeneralProxyError("Socket error", error) + except ProxyError: + # Protocol error while negotiating with 
proxy + self.close() + raise + + def _proxy_addr(self): + """ + Return proxy address to connect to as tuple object + """ + proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy + proxy_port = proxy_port or DEFAULT_PORTS.get(proxy_type) + if not proxy_port: + raise GeneralProxyError("Invalid proxy type") + return proxy_addr, proxy_port diff --git a/src/lib/PySocks/sockshandler.py b/src/lib/PySocks/sockshandler.py new file mode 100644 index 00000000..26c83439 --- /dev/null +++ b/src/lib/PySocks/sockshandler.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +""" +SocksiPy + urllib2 handler + +version: 0.3 +author: e + +This module provides a Handler which you can use with urllib2 to allow it to tunnel your connection through a socks.sockssocket socket, with out monkey patching the original socket... +""" +import ssl + +try: + import urllib2 + import httplib +except ImportError: # Python 3 + import urllib.request as urllib2 + import http.client as httplib + +import socks # $ pip install PySocks + +def merge_dict(a, b): + d = a.copy() + d.update(b) + return d + +class SocksiPyConnection(httplib.HTTPConnection): + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPConnection.__init__(self, *args, **kwargs) + + def connect(self): + self.sock = socks.socksocket() + self.sock.setproxy(*self.proxyargs) + if type(self.timeout) in (int, float): + self.sock.settimeout(self.timeout) + self.sock.connect((self.host, self.port)) + +class SocksiPyConnectionS(httplib.HTTPSConnection): + def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPSConnection.__init__(self, *args, **kwargs) + + def connect(self): + sock = socks.socksocket() + sock.setproxy(*self.proxyargs) + if type(self.timeout) in (int, float): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file) + +class SocksiPyHandler(urllib2.HTTPHandler, urllib2.HTTPSHandler): + def __init__(self, *args, **kwargs): + self.args = args + self.kw = kwargs + urllib2.HTTPHandler.__init__(self) + + def http_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnection(*self.args, host=host, port=port, timeout=timeout, **kw) + return conn + return self.do_open(build, req) + + def https_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnectionS(*self.args, host=host, port=port, timeout=timeout, **kw) + return conn + return self.do_open(build, req) + +if __name__ == "__main__": + import sys + try: + port = int(sys.argv[1]) + except (ValueError, IndexError): + port = 9050 + opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "localhost", port)) + print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode()) + print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode()) diff --git a/src/lib/bencode/__init__.py b/src/lib/bencode/__init__.py new file mode 100644 index 00000000..c484c651 --- /dev/null +++ b/src/lib/bencode/__init__.py @@ -0,0 +1,7 @@ +try: + string_type = basestring +except NameError: + string_type = str + +from .encode import encode +from .decode import decode diff --git 
a/src/lib/bencode/decode.py b/src/lib/bencode/decode.py new file mode 100644 index 00000000..65362490 --- /dev/null +++ b/src/lib/bencode/decode.py @@ -0,0 +1,141 @@ +import itertools +import collections + +from . import string_type + +try: + range = xrange +except NameError: + pass + +def decode(data): + ''' + Bdecodes data into Python built-in types. + ''' + + return consume(LookaheadIterator(data)) + +class LookaheadIterator(collections.Iterator): + ''' + An iterator that lets you peek at the next item. + ''' + + def __init__(self, iterator): + self.iterator, self.next_iterator = itertools.tee(iter(iterator)) + + # Be one step ahead + self._advance() + + def _advance(self): + self.next_item = next(self.next_iterator, None) + + def __next__(self): + self._advance() + + return next(self.iterator) + + # Python 2 compatibility + next = __next__ + +def consume(stream): + item = stream.next_item + + if item is None: + raise ValueError('Encoding empty data is undefined') + elif item == 'i': + return consume_int(stream) + elif item == 'l': + return consume_list(stream) + elif item == 'd': + return consume_dict(stream) + elif item is not None and item[0].isdigit(): + return consume_str(stream) + else: + raise ValueError('Invalid bencode object type: ', item) + +def consume_number(stream): + result = '' + + while True: + chunk = stream.next_item + + if not chunk.isdigit(): + return result + elif result.startswith('0'): + raise ValueError('Invalid number') + + next(stream) + result += chunk + +def consume_int(stream): + if next(stream) != 'i': + raise ValueError() + + negative = stream.next_item == '-' + + if negative: + next(stream) + + result = int(consume_number(stream)) + + if negative: + result *= -1 + + if result == 0: + raise ValueError('Negative zero is not allowed') + + if next(stream) != 'e': + raise ValueError('Unterminated integer') + + return result + +def consume_str(stream): + length = int(consume_number(stream)) + + if next(stream) != ':': + raise ValueError('Malformed string') + + result = '' + + for i in range(length): + try: + result += next(stream) + except StopIteration: + raise ValueError('Invalid string length') + + return result + +def consume_list(stream): + if next(stream) != 'l': + raise ValueError() + + l = [] + + while stream.next_item != 'e': + l.append(consume(stream)) + + if next(stream) != 'e': + raise ValueError('Unterminated list') + + return l + +def consume_dict(stream): + if next(stream) != 'd': + raise ValueError() + + d = {} + + while stream.next_item != 'e': + key = consume(stream) + + if not isinstance(key, string_type): + raise ValueError('Dictionary keys must be strings') + + value = consume(stream) + + d[key] = value + + if next(stream) != 'e': + raise ValueError('Unterminated dictionary') + + return d diff --git a/src/lib/bencode/encode.py b/src/lib/bencode/encode.py new file mode 100644 index 00000000..be98f994 --- /dev/null +++ b/src/lib/bencode/encode.py @@ -0,0 +1,22 @@ +from . import string_type + +def encode(obj): + ''' + Bencodes the object. The object must be an instance of: str, int, list, or dict. 
+ ''' + + if isinstance(obj, string_type): + return '{0}:{1}'.format(len(obj), obj) + elif isinstance(obj, int): + return 'i{0}e'.format(obj) + elif isinstance(obj, list): + values = ''.join([encode(o) for o in obj]) + + return 'l{0}e'.format(values) + elif isinstance(obj, dict): + items = sorted(obj.items()) + values = ''.join([encode(str(key)) + encode(value) for key, value in items]) + + return 'd{0}e'.format(values) + else: + raise TypeError('Unsupported type: {0}. Must be one of: str, int, list, dict.'.format(type(obj))) diff --git a/src/lib/bencode_open/LICENSE b/src/lib/bencode_open/LICENSE deleted file mode 100644 index f0e46d71..00000000 --- a/src/lib/bencode_open/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Ivan Machugovskiy - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
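
For reference, a minimal round-trip sketch of the bencode module added above (the
sample data is made up; note this implementation works on str, not bytes, and its
decoder relies on collections.Iterator, which only exists on Python versions before
3.10):

    from bencode import encode, decode

    data = {"name": "example", "pieces": ["a", "bb"], "length": 42}
    encoded = encode(data)
    # Dictionary keys are sorted before encoding, so this yields:
    # 'd6:lengthi42e4:name7:example6:piecesl1:a2:bbee'
    assert decode(encoded) == data
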
diff --git a/src/lib/bencode_open/__init__.py b/src/lib/bencode_open/__init__.py deleted file mode 100644 index e3c783cc..00000000 --- a/src/lib/bencode_open/__init__.py +++ /dev/null @@ -1,160 +0,0 @@ -def loads(data): - if not isinstance(data, bytes): - raise TypeError("Expected 'bytes' object, got {}".format(type(data))) - - offset = 0 - - - def parseInteger(): - nonlocal offset - - offset += 1 - had_digit = False - abs_value = 0 - - sign = 1 - if data[offset] == ord("-"): - sign = -1 - offset += 1 - while offset < len(data): - if data[offset] == ord("e"): - # End of string - offset += 1 - if not had_digit: - raise ValueError("Integer without value") - break - if ord("0") <= data[offset] <= ord("9"): - abs_value = abs_value * 10 + int(chr(data[offset])) - had_digit = True - offset += 1 - else: - raise ValueError("Invalid integer") - else: - raise ValueError("Unexpected EOF, expected integer") - - if not had_digit: - raise ValueError("Empty integer") - - return sign * abs_value - - - def parseString(): - nonlocal offset - - length = int(chr(data[offset])) - offset += 1 - - while offset < len(data): - if data[offset] == ord(":"): - offset += 1 - break - if ord("0") <= data[offset] <= ord("9"): - length = length * 10 + int(chr(data[offset])) - offset += 1 - else: - raise ValueError("Invalid string length") - else: - raise ValueError("Unexpected EOF, expected string contents") - - if offset + length > len(data): - raise ValueError("Unexpected EOF, expected string contents") - offset += length - - return data[offset - length:offset] - - - def parseList(): - nonlocal offset - - offset += 1 - values = [] - - while offset < len(data): - if data[offset] == ord("e"): - # End of list - offset += 1 - return values - else: - values.append(parse()) - - raise ValueError("Unexpected EOF, expected list contents") - - - def parseDict(): - nonlocal offset - - offset += 1 - items = {} - - while offset < len(data): - if data[offset] == ord("e"): - # End of list - offset += 1 - return items - else: - key, value = parse(), parse() - if not isinstance(key, bytes): - raise ValueError("A dict key must be a byte string") - if key in items: - raise ValueError("Duplicate dict key: {}".format(key)) - items[key] = value - - raise ValueError("Unexpected EOF, expected dict contents") - - - def parse(): - nonlocal offset - - if data[offset] == ord("i"): - return parseInteger() - elif data[offset] == ord("l"): - return parseList() - elif data[offset] == ord("d"): - return parseDict() - elif ord("0") <= data[offset] <= ord("9"): - return parseString() - - raise ValueError("Unknown type specifier: '{}'".format(chr(data[offset]))) - - result = parse() - - if offset != len(data): - raise ValueError("Expected EOF, got {} bytes left".format(len(data) - offset)) - - return result - - -def dumps(data): - result = bytearray() - - - def convert(data): - nonlocal result - - if isinstance(data, str): - raise ValueError("bencode only supports bytes, not str. 
Use encode") - - if isinstance(data, bytes): - result += str(len(data)).encode() + b":" + data - elif isinstance(data, int): - result += b"i" + str(data).encode() + b"e" - elif isinstance(data, list): - result += b"l" - for val in data: - convert(val) - result += b"e" - elif isinstance(data, dict): - result += b"d" - for key in sorted(data.keys()): - if not isinstance(key, bytes): - raise ValueError("Dict key can only be bytes, not {}".format(type(key))) - convert(key) - convert(data[key]) - result += b"e" - else: - raise ValueError("bencode only supports bytes, int, list and dict") - - - convert(data) - - return bytes(result) diff --git a/src/lib/cssvendor/cssvendor.py b/src/lib/cssvendor/cssvendor.py index b04d7cc3..e2ca6788 100644 --- a/src/lib/cssvendor/cssvendor.py +++ b/src/lib/cssvendor/cssvendor.py @@ -3,26 +3,26 @@ import re def prefix(content): content = re.sub( - b"@keyframes (.*? {.*?}\s*})", b"@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", + "@keyframes (.*? {.*?}\s*})", "@keyframes \\1\n@-webkit-keyframes \\1\n@-moz-keyframes \\1\n", content, flags=re.DOTALL ) content = re.sub( - b'([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' + - b'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', - b'\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content + '([^-\*])(border-radius|box-shadow|appearance|transition|animation|box-sizing|' + + 'backface-visibility|transform|filter|perspective|animation-[a-z-]+): (.*?)([;}])', + '\\1-webkit-\\2: \\3; -moz-\\2: \\3; -o-\\2: \\3; -ms-\\2: \\3; \\2: \\3 \\4', content ) content = re.sub( - b'(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])', - b'\\1: -webkit-\\2(\\3);' + - b'\\1: -moz-\\2(\\3);' + - b'\\1: -o-\\2(\\3);' + - b'\\1: -ms-\\2(\\3);' + - b'\\1: \\2(\\3);', content + '(?<=[^a-zA-Z0-9-])([a-zA-Z0-9-]+): {0,1}(linear-gradient)\((.*?)(\)[;\n])', + '\\1: -webkit-\\2(\\3);' + + '\\1: -moz-\\2(\\3);' + + '\\1: -o-\\2(\\3);' + + '\\1: -ms-\\2(\\3);' + + '\\1: \\2(\\3);', content ) return content if __name__ == "__main__": - print(prefix(b""" + print prefix(""" .test { border-radius: 5px; background: linear-gradient(red, blue); @@ -36,4 +36,4 @@ if __name__ == "__main__": } - """).decode("utf8")) + """) diff --git a/src/lib/gevent_ws/__init__.py b/src/lib/gevent_ws/__init__.py deleted file mode 100644 index a157e94c..00000000 --- a/src/lib/gevent_ws/__init__.py +++ /dev/null @@ -1,279 +0,0 @@ -from gevent.pywsgi import WSGIHandler, _InvalidClientInput -from gevent.queue import Queue -import gevent -import hashlib -import base64 -import struct -import socket -import time -import sys - - -SEND_PACKET_SIZE = 1300 -OPCODE_TEXT = 1 -OPCODE_BINARY = 2 -OPCODE_CLOSE = 8 -OPCODE_PING = 9 -OPCODE_PONG = 10 -STATUS_OK = 1000 -STATUS_PROTOCOL_ERROR = 1002 -STATUS_DATA_ERROR = 1007 -STATUS_POLICY_VIOLATION = 1008 -STATUS_TOO_LONG = 1009 - - -class WebSocket: - def __init__(self, socket): - self.socket = socket - self.closed = False - self.status = None - self._receive_error = None - self._queue = Queue() - self.max_length = 10 * 1024 * 1024 - gevent.spawn(self._listen) - - - def set_max_message_length(self, length): - self.max_length = length - - - def _listen(self): - try: - while True: - fin = False - message = bytearray() - is_first_message = True - start_opcode = None - while not fin: - payload, opcode, fin = self._get_frame(max_length=self.max_length - len(message)) - # Make sure continuation frames have correct 
information - if not is_first_message and opcode != 0: - self._error(STATUS_PROTOCOL_ERROR) - if is_first_message: - if opcode not in (OPCODE_TEXT, OPCODE_BINARY): - self._error(STATUS_PROTOCOL_ERROR) - # Save opcode - start_opcode = opcode - message += payload - is_first_message = False - message = bytes(message) - if start_opcode == OPCODE_TEXT: # UTF-8 text - try: - message = message.decode() - except UnicodeDecodeError: - self._error(STATUS_DATA_ERROR) - self._queue.put(message) - except Exception as e: - self.closed = True - self._receive_error = e - self._queue.put(None) # To make sure the error is read - - - def receive(self): - if not self._queue.empty(): - return self.receive_nowait() - if isinstance(self._receive_error, EOFError): - return None - if self._receive_error: - raise self._receive_error - self._queue.peek() - return self.receive_nowait() - - - def receive_nowait(self): - ret = self._queue.get_nowait() - if self._receive_error and not isinstance(self._receive_error, EOFError): - raise self._receive_error - return ret - - - def send(self, data): - if self.closed: - raise EOFError() - if isinstance(data, str): - self._send_frame(OPCODE_TEXT, data.encode()) - elif isinstance(data, bytes): - self._send_frame(OPCODE_BINARY, data) - else: - raise TypeError("Expected str or bytes, got " + repr(type(data))) - - - # Reads a frame from the socket. Pings, pongs and close packets are handled - # automatically - def _get_frame(self, max_length): - while True: - payload, opcode, fin = self._read_frame(max_length=max_length) - if opcode == OPCODE_PING: - self._send_frame(OPCODE_PONG, payload) - elif opcode == OPCODE_PONG: - pass - elif opcode == OPCODE_CLOSE: - if len(payload) >= 2: - self.status = struct.unpack("!H", payload[:2])[0] - was_closed = self.closed - self.closed = True - if not was_closed: - # Send a close frame in response - self.close(STATUS_OK) - raise EOFError() - else: - return payload, opcode, fin - - - # Low-level function, use _get_frame instead - def _read_frame(self, max_length): - header = self._recv_exactly(2) - - if not (header[1] & 0x80): - self._error(STATUS_POLICY_VIOLATION) - - opcode = header[0] & 0xf - fin = bool(header[0] & 0x80) - - payload_length = header[1] & 0x7f - if payload_length == 126: - payload_length = struct.unpack("!H", self._recv_exactly(2))[0] - elif payload_length == 127: - payload_length = struct.unpack("!Q", self._recv_exactly(8))[0] - - # Control frames are handled in a special way - if opcode in (OPCODE_PING, OPCODE_PONG): - max_length = 125 - - if payload_length > max_length: - self._error(STATUS_TOO_LONG) - - mask = self._recv_exactly(4) - payload = self._recv_exactly(payload_length) - payload = self._unmask(payload, mask) - - return payload, opcode, fin - - - def _recv_exactly(self, length): - buf = bytearray() - while len(buf) < length: - block = self.socket.recv(min(4096, length - len(buf))) - if block == b"": - raise EOFError() - buf += block - return bytes(buf) - - - def _unmask(self, payload, mask): - def gen(c): - return bytes([x ^ c for x in range(256)]) - - - payload = bytearray(payload) - payload[0::4] = payload[0::4].translate(gen(mask[0])) - payload[1::4] = payload[1::4].translate(gen(mask[1])) - payload[2::4] = payload[2::4].translate(gen(mask[2])) - payload[3::4] = payload[3::4].translate(gen(mask[3])) - return bytes(payload) - - - def _send_frame(self, opcode, data): - for i in range(0, len(data), SEND_PACKET_SIZE): - part = data[i:i + SEND_PACKET_SIZE] - fin = int(i == (len(data) - 1) // SEND_PACKET_SIZE * 
SEND_PACKET_SIZE) - header = bytes( - [ - (opcode if i == 0 else 0) | (fin << 7), - min(len(part), 126) - ] - ) - if len(part) >= 126: - header += struct.pack("!H", len(part)) - self.socket.sendall(header + part) - - - def _error(self, status): - self.close(status) - raise EOFError() - - - def close(self, status=STATUS_OK): - self.closed = True - try: - self._send_frame(OPCODE_CLOSE, struct.pack("!H", status)) - except (BrokenPipeError, ConnectionResetError): - pass - self.socket.close() - - -class WebSocketHandler(WSGIHandler): - def handle_one_response(self): - self.time_start = time.time() - self.status = None - self.headers_sent = False - - self.result = None - self.response_use_chunked = False - self.response_length = 0 - - - http_connection = [s.strip().lower() for s in self.environ.get("HTTP_CONNECTION", "").split(",")] - if "upgrade" not in http_connection or self.environ.get("HTTP_UPGRADE", "").lower() != "websocket": - # Not my problem - return super(WebSocketHandler, self).handle_one_response() - - if "HTTP_SEC_WEBSOCKET_KEY" not in self.environ: - self.start_response("400 Bad Request", []) - return - - # Generate Sec-Websocket-Accept header - accept = self.environ["HTTP_SEC_WEBSOCKET_KEY"].encode() - accept += b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - accept = base64.b64encode(hashlib.sha1(accept).digest()).decode() - - # Accept - self.start_response("101 Switching Protocols", [ - ("Upgrade", "websocket"), - ("Connection", "Upgrade"), - ("Sec-Websocket-Accept", accept) - ])(b"") - - self.environ["wsgi.websocket"] = WebSocket(self.socket) - - # Can't call super because it sets invalid flags like "status" - try: - try: - self.run_application() - finally: - try: - self.wsgi_input._discard() - except (socket.error, IOError): - pass - except _InvalidClientInput: - self._send_error_response_if_possible(400) - except socket.error as ex: - if ex.args[0] in self.ignored_socket_errors: - self.close_connection = True - else: - self.handle_error(*sys.exc_info()) - except: # pylint:disable=bare-except - self.handle_error(*sys.exc_info()) - finally: - self.time_finish = time.time() - self.log_request() - self.close_connection = True - - - def process_result(self): - if "wsgi.websocket" in self.environ: - if self.result is None: - return - # Flushing result is required for werkzeug compatibility - for elem in self.result: - pass - else: - super(WebSocketHandler, self).process_result() - - - @property - def version(self): - if not self.environ: - return None - - return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION') diff --git a/src/lib/geventwebsocket/AUTHORS b/src/lib/geventwebsocket/AUTHORS new file mode 100644 index 00000000..02de7096 --- /dev/null +++ b/src/lib/geventwebsocket/AUTHORS @@ -0,0 +1,9 @@ +This Websocket library for Gevent is written and maintained by + + Jeffrey Gelens + + +Contributors: + + Denis Bilenko + Lon Ingram diff --git a/src/lib/geventwebsocket/LICENSE b/src/lib/geventwebsocket/LICENSE new file mode 100644 index 00000000..2526edb3 --- /dev/null +++ b/src/lib/geventwebsocket/LICENSE @@ -0,0 +1,13 @@ + Copyright 2011-2017 Jeffrey Gelens + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/lib/geventwebsocket/__init__.py b/src/lib/geventwebsocket/__init__.py new file mode 100644 index 00000000..5ee3f961 --- /dev/null +++ b/src/lib/geventwebsocket/__init__.py @@ -0,0 +1,21 @@ +VERSION = (0, 10, 1, 'final', 0) + +__all__ = [ + 'WebSocketApplication', + 'Resource', + 'WebSocketServer', + 'WebSocketError', + 'get_version' +] + + +def get_version(*args, **kwargs): + from .utils import get_version + return get_version(*args, **kwargs) + +try: + from .resource import WebSocketApplication, Resource + from .server import WebSocketServer + from .exceptions import WebSocketError +except ImportError: + pass diff --git a/src/lib/geventwebsocket/_compat.py b/src/lib/geventwebsocket/_compat.py new file mode 100644 index 00000000..70354135 --- /dev/null +++ b/src/lib/geventwebsocket/_compat.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import, division, print_function + +import sys +import codecs + + +PY3 = sys.version_info[0] == 3 +PY2 = sys.version_info[0] == 2 + + +if PY2: + bytes = str + text_type = unicode + string_types = basestring + range_type = xrange + iteritems = lambda x: x.iteritems() + # b = lambda x: x +else: + text_type = str + string_types = str, + range_type = range + iteritems = lambda x: iter(x.items()) + # b = lambda x: codecs.latin_1_encode(x)[0] diff --git a/src/lib/geventwebsocket/exceptions.py b/src/lib/geventwebsocket/exceptions.py new file mode 100644 index 00000000..e066727e --- /dev/null +++ b/src/lib/geventwebsocket/exceptions.py @@ -0,0 +1,19 @@ +from socket import error as socket_error + + +class WebSocketError(socket_error): + """ + Base class for all websocket errors. + """ + + +class ProtocolError(WebSocketError): + """ + Raised if an error occurs when de/encoding the websocket protocol. + """ + + +class FrameTooLargeException(ProtocolError): + """ + Raised if a frame is received that is too large. + """ diff --git a/src/lib/geventwebsocket/gunicorn/__init__.py b/src/lib/geventwebsocket/gunicorn/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/geventwebsocket/gunicorn/workers.py b/src/lib/geventwebsocket/gunicorn/workers.py new file mode 100644 index 00000000..d0aa1369 --- /dev/null +++ b/src/lib/geventwebsocket/gunicorn/workers.py @@ -0,0 +1,6 @@ +from geventwebsocket.handler import WebSocketHandler +from gunicorn.workers.ggevent import GeventPyWSGIWorker + + +class GeventWebSocketWorker(GeventPyWSGIWorker): + wsgi_handler = WebSocketHandler diff --git a/src/lib/geventwebsocket/handler.py b/src/lib/geventwebsocket/handler.py new file mode 100644 index 00000000..8aec77c0 --- /dev/null +++ b/src/lib/geventwebsocket/handler.py @@ -0,0 +1,283 @@ +import base64 +import hashlib + +from gevent.pywsgi import WSGIHandler +from ._compat import PY3 +from .websocket import WebSocket, Stream +from .logging import create_logger + + +class Client(object): + def __init__(self, address, ws): + self.address = address + self.ws = ws + + +class WebSocketHandler(WSGIHandler): + """ + Automatically upgrades the connection to a websocket. + + To prevent the WebSocketHandler to call the underlying WSGI application, + but only setup the WebSocket negotiations, do: + + mywebsockethandler.prevent_wsgi_call = True + + before calling run_application(). This is useful if you want to do more + things before calling the app, and want to off-load the WebSocket + negotiations to this library. 
Socket.IO needs this for example, to send + the 'ack' before yielding the control to your WSGI app. + """ + + SUPPORTED_VERSIONS = ('13', '8', '7') + GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" + + def run_websocket(self): + """ + Called when a websocket has been created successfully. + """ + + if getattr(self, 'prevent_wsgi_call', False): + return + + # In case WebSocketServer is not used + if not hasattr(self.server, 'clients'): + self.server.clients = {} + + # Since we're now a websocket connection, we don't care what the + # application actually responds with for the http response + + try: + self.server.clients[self.client_address] = Client( + self.client_address, self.websocket) + list(self.application(self.environ, lambda s, h, e=None: [])) + finally: + del self.server.clients[self.client_address] + if not self.websocket.closed: + self.websocket.close() + self.environ.update({ + 'wsgi.websocket': None + }) + self.websocket = None + + def run_application(self): + if (hasattr(self.server, 'pre_start_hook') and self.server.pre_start_hook): + self.logger.debug("Calling pre-start hook") + if self.server.pre_start_hook(self): + return super(WebSocketHandler, self).run_application() + + self.logger.debug("Initializing WebSocket") + self.result = self.upgrade_websocket() + + if hasattr(self, 'websocket'): + if self.status and not self.headers_sent: + self.write('') + + self.run_websocket() + else: + if self.status: + # A status was set, likely an error so just send the response + if not self.result: + self.result = [] + + self.process_result() + return + + # This handler did not handle the request, so defer it to the + # underlying application object + return super(WebSocketHandler, self).run_application() + + def upgrade_websocket(self): + """ + Attempt to upgrade the current environ into a websocket enabled + connection. If successful, the environ dict with be updated with two + new entries, `wsgi.websocket` and `wsgi.websocket_version`. + + :returns: Whether the upgrade was successful. + """ + + # Some basic sanity checks first + + self.logger.debug("Validating WebSocket request") + + if self.environ.get('REQUEST_METHOD', '') != 'GET': + # This is not a websocket request, so we must not handle it + self.logger.debug('Can only upgrade connection if using GET method.') + return + + upgrade = self.environ.get('HTTP_UPGRADE', '').lower() + + if upgrade == 'websocket': + connection = self.environ.get('HTTP_CONNECTION', '').lower() + + if 'upgrade' not in connection: + # This is not a websocket request, so we must not handle it + self.logger.warning("Client didn't ask for a connection " + "upgrade") + return + else: + # This is not a websocket request, so we must not handle it + return + + if self.request_version != 'HTTP/1.1': + self.start_response('402 Bad Request', []) + self.logger.warning("Bad server protocol in headers") + + return ['Bad protocol version'] + + if self.environ.get('HTTP_SEC_WEBSOCKET_VERSION'): + return self.upgrade_connection() + else: + self.logger.warning("No protocol defined") + self.start_response('426 Upgrade Required', [ + ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))]) + + return ['No Websocket protocol version defined'] + + def upgrade_connection(self): + """ + Validate and 'upgrade' the HTTP request to a WebSocket request. + + If an upgrade succeeded then then handler will have `start_response` + with a status of `101`, the environ will also be updated with + `wsgi.websocket` and `wsgi.websocket_version` keys. 
+ + :param environ: The WSGI environ dict. + :param start_response: The callable used to start the response. + :param stream: File like object that will be read from/written to by + the underlying WebSocket object, if created. + :return: The WSGI response iterator is something went awry. + """ + + self.logger.debug("Attempting to upgrade connection") + + version = self.environ.get("HTTP_SEC_WEBSOCKET_VERSION") + + if version not in self.SUPPORTED_VERSIONS: + msg = "Unsupported WebSocket Version: {0}".format(version) + + self.logger.warning(msg) + self.start_response('400 Bad Request', [ + ('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS)) + ]) + + return [msg] + + key = self.environ.get("HTTP_SEC_WEBSOCKET_KEY", '').strip() + + if not key: + # 5.2.1 (3) + msg = "Sec-WebSocket-Key header is missing/empty" + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + try: + key_len = len(base64.b64decode(key)) + except TypeError: + msg = "Invalid key: {0}".format(key) + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + if key_len != 16: + # 5.2.1 (3) + msg = "Invalid key: {0}".format(key) + + self.logger.warning(msg) + self.start_response('400 Bad Request', []) + + return [msg] + + # Check for WebSocket Protocols + requested_protocols = self.environ.get( + 'HTTP_SEC_WEBSOCKET_PROTOCOL', '') + protocol = None + + if hasattr(self.application, 'app_protocol'): + allowed_protocol = self.application.app_protocol( + self.environ['PATH_INFO']) + + if allowed_protocol and allowed_protocol in requested_protocols: + protocol = allowed_protocol + self.logger.debug("Protocol allowed: {0}".format(protocol)) + + self.websocket = WebSocket(self.environ, Stream(self), self) + self.environ.update({ + 'wsgi.websocket_version': version, + 'wsgi.websocket': self.websocket + }) + + if PY3: + accept = base64.b64encode( + hashlib.sha1((key + self.GUID).encode("latin-1")).digest() + ).decode("latin-1") + else: + accept = base64.b64encode(hashlib.sha1(key + self.GUID).digest()) + + headers = [ + ("Upgrade", "websocket"), + ("Connection", "Upgrade"), + ("Sec-WebSocket-Accept", accept) + ] + + if protocol: + headers.append(("Sec-WebSocket-Protocol", protocol)) + + self.logger.debug("WebSocket request accepted, switching protocols") + self.start_response("101 Switching Protocols", headers) + + @property + def logger(self): + if not hasattr(self.server, 'logger'): + self.server.logger = create_logger(__name__) + + return self.server.logger + + def log_request(self): + if '101' not in str(self.status): + self.logger.info(self.format_request()) + + @property + def active_client(self): + return self.server.clients[self.client_address] + + def start_response(self, status, headers, exc_info=None): + """ + Called when the handler is ready to send a response back to the remote + endpoint. A websocket connection may have not been created. + """ + writer = super(WebSocketHandler, self).start_response( + status, headers, exc_info=exc_info) + + self._prepare_response() + + return writer + + def _prepare_response(self): + """ + Sets up the ``pywsgi.Handler`` to work with a websocket response. + + This is used by other projects that need to support WebSocket + connections as part of a larger effort. 
+        assert not self.headers_sent
+
+        if not self.environ.get('wsgi.websocket'):
+            # a WebSocket connection is not established, do nothing
+            return
+
+        # So that `finalize_headers` doesn't write a Content-Length header
+        self.provided_content_length = False
+
+        # The websocket is now controlling the response
+        self.response_use_chunked = False
+
+        # Once the request is over, the connection must be closed
+        self.close_connection = True
+
+        # Prevents the Date header from being written
+        self.provided_date = True
diff --git a/src/lib/geventwebsocket/logging.py b/src/lib/geventwebsocket/logging.py
new file mode 100644
index 00000000..ac0c9692
--- /dev/null
+++ b/src/lib/geventwebsocket/logging.py
@@ -0,0 +1,32 @@
+from __future__ import absolute_import
+
+from logging import getLogger, StreamHandler, getLoggerClass, Formatter, DEBUG, INFO
+
+
+def create_logger(name, debug=False, format=None):
+    Logger = getLoggerClass()
+
+    class DebugLogger(Logger):
+        def getEffectiveLevel(self):
+            if self.level == 0 and debug:
+                return DEBUG
+            else:
+                return Logger.getEffectiveLevel(self)
+
+    class DebugHandler(StreamHandler):
+        def emit(self, record):
+            # Only emit through this handler when debugging is enabled
+            if debug:
+                StreamHandler.emit(self, record)
+
+    handler = DebugHandler()
+    handler.setLevel(DEBUG)
+
+    if format:
+        handler.setFormatter(Formatter(format))
+
+    logger = getLogger(name)
+    del logger.handlers[:]
+    logger.__class__ = DebugLogger
+    logger.addHandler(handler)
+    logger.setLevel(INFO)
+
+    return logger
diff --git a/src/lib/geventwebsocket/protocols/__init__.py b/src/lib/geventwebsocket/protocols/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/lib/geventwebsocket/protocols/base.py b/src/lib/geventwebsocket/protocols/base.py
new file mode 100644
index 00000000..1c05ab62
--- /dev/null
+++ b/src/lib/geventwebsocket/protocols/base.py
@@ -0,0 +1,35 @@
+class BaseProtocol(object):
+    PROTOCOL_NAME = ''
+
+    def __init__(self, app):
+        self._app = app
+
+    def on_open(self):
+        self.app.on_open()
+
+    def on_message(self, message):
+        self.app.on_message(message)
+
+    def on_close(self, reason=None):
+        self.app.on_close(reason)
+
+    @property
+    def app(self):
+        if self._app:
+            return self._app
+        else:
+            raise Exception("No application coupled")
+
+    @property
+    def server(self):
+        if not hasattr(self.app, 'ws'):
+            return None
+
+        return self.app.ws.handler.server
+
+    @property
+    def handler(self):
+        if not hasattr(self.app, 'ws'):
+            return None
+
+        return self.app.ws.handler
diff --git a/src/lib/geventwebsocket/protocols/wamp.py b/src/lib/geventwebsocket/protocols/wamp.py
new file mode 100644
index 00000000..c89775be
--- /dev/null
+++ b/src/lib/geventwebsocket/protocols/wamp.py
@@ -0,0 +1,235 @@
+import inspect
+import random
+import string
+import types
+
+try:
+    import ujson as json
+except ImportError:
+    try:
+        import simplejson as json
+    except ImportError:
+        import json
+
+from .._compat import range_type, string_types
+from ..exceptions import WebSocketError
+from .base import BaseProtocol
+
+
+def export_rpc(arg=None):
+    if isinstance(arg, types.FunctionType):
+        arg._rpc = arg.__name__
+    return arg
+
+
+def serialize(data):
+    return json.dumps(data)
+
+
+class Prefixes(object):
+    def __init__(self):
+        self.prefixes = {}
+
+    def add(self, prefix, uri):
+        self.prefixes[prefix] = uri
+
+    def resolve(self, curie_or_uri):
+        if "http://" in curie_or_uri:
+            return curie_or_uri
+        elif ':' in curie_or_uri:
+            prefix, proc = curie_or_uri.split(':', 1)
+            return self.prefixes[prefix] + proc
+        else:
+            raise Exception(curie_or_uri)
+
+
+class RemoteProcedures(object):
+    def __init__(self):
+        self.calls = {}
+
+    def register_procedure(self, uri, proc):
+        self.calls[uri] = proc
+
+    def register_object(self, uri, obj):
+        for k in inspect.getmembers(obj, inspect.ismethod):
+            if '_rpc' in k[1].__dict__:
+                proc_uri = uri + k[1]._rpc
+                self.calls[proc_uri] = (obj, k[1])
+
+    def call(self, uri, args):
+        if uri in self.calls:
+            proc = self.calls[uri]
+
+            # Do the correct call whether it's a function or instance method.
+            if isinstance(proc, tuple):
+                if proc[1].__self__ is None:
+                    # Create instance of object and call method
+                    return proc[1](proc[0](), *args)
+                else:
+                    # Call bound method on instance
+                    return proc[1](*args)
+            else:
+                return self.calls[uri](*args)
+        else:
+            raise Exception("no such uri '{}'".format(uri))
+
+
+class Channels(object):
+    def __init__(self):
+        self.channels = {}
+
+    def create(self, uri, prefix_matching=False):
+        if uri not in self.channels:
+            self.channels[uri] = []
+
+        # TODO: implement prefix matching
+
+    def subscribe(self, uri, client):
+        if uri in self.channels:
+            self.channels[uri].append(client)
+
+    def unsubscribe(self, uri, client):
+        if uri not in self.channels:
+            return
+
+        client_index = self.channels[uri].index(client)
+        self.channels[uri].pop(client_index)
+
+        if len(self.channels[uri]) == 0:
+            del self.channels[uri]
+
+    def publish(self, uri, event, exclude=None, eligible=None):
+        if uri not in self.channels:
+            return
+
+        # TODO: exclude & eligible
+
+        msg = [WampProtocol.MSG_EVENT, uri, event]
+
+        # Iterate over a snapshot: disconnected clients get removed from
+        # the underlying list while we loop.
+        for client in list(self.channels[uri]):
+            try:
+                client.ws.send(serialize(msg))
+            except WebSocketError:
+                # Seems someone didn't unsubscribe before disconnecting
+                self.channels[uri].remove(client)
+
+
+class WampProtocol(BaseProtocol):
+    MSG_WELCOME = 0
+    MSG_PREFIX = 1
+    MSG_CALL = 2
+    MSG_CALL_RESULT = 3
+    MSG_CALL_ERROR = 4
+    MSG_SUBSCRIBE = 5
+    MSG_UNSUBSCRIBE = 6
+    MSG_PUBLISH = 7
+    MSG_EVENT = 8
+
+    PROTOCOL_NAME = "wamp"
+
+    def __init__(self, *args, **kwargs):
+        self.procedures = RemoteProcedures()
+        self.prefixes = Prefixes()
+        # string.letters does not exist on Python 3; ascii_letters works on both.
+        self.session_id = ''.join(
+            [random.choice(string.digits + string.ascii_letters)
+             for i in range_type(16)])
+
+        super(WampProtocol, self).__init__(*args, **kwargs)
+
+    def register_procedure(self, *args, **kwargs):
+        self.procedures.register_procedure(*args, **kwargs)
+
+    def register_object(self, *args, **kwargs):
+        self.procedures.register_object(*args, **kwargs)
+
+    def register_pubsub(self, *args, **kwargs):
+        if not hasattr(self.server, 'channels'):
+            self.server.channels = Channels()
+
+        self.server.channels.create(*args, **kwargs)
+
+    def do_handshake(self):
+        from geventwebsocket import get_version
+
+        welcome = [
+            self.MSG_WELCOME,
+            self.session_id,
+            1,
+            'gevent-websocket/' + get_version()
+        ]
+        self.app.ws.send(serialize(welcome))
+
+    def _get_exception_info(self, e):
+        uri = 'http://TODO#generic'
+        desc = str(type(e))
+        details = str(e)
+        return [uri, desc, details]
+
+    def rpc_call(self, data):
+        call_id, curie_or_uri = data[1:3]
+        args = data[3:]
+
+        if not isinstance(call_id, string_types):
+            raise Exception()
+        if not isinstance(curie_or_uri, string_types):
+            raise Exception()
+
+        uri = self.prefixes.resolve(curie_or_uri)
+
+        try:
+            result = self.procedures.call(uri, args)
+            result_msg = [self.MSG_CALL_RESULT, call_id, result]
+        except Exception as e:
+            result_msg = [self.MSG_CALL_ERROR,
+                          call_id] + self._get_exception_info(e)
+
+        self.app.on_message(serialize(result_msg))
+
+    def pubsub_action(self, data):
+        action = data[0]
+        curie_or_uri = data[1]
+
+        if not isinstance(action, int):
+            raise Exception()
+        if not isinstance(curie_or_uri, string_types):
+            raise Exception()
+
+        uri = self.prefixes.resolve(curie_or_uri)
+
+        if action == self.MSG_SUBSCRIBE and len(data) == 2:
+            self.server.channels.subscribe(data[1], self.handler.active_client)
+
+        elif action == self.MSG_UNSUBSCRIBE and len(data) == 2:
+            self.server.channels.unsubscribe(
+                data[1], self.handler.active_client)
+
+        elif action == self.MSG_PUBLISH and len(data) >= 3:
+            payload = data[2] if len(data) >= 3 else None
+            exclude = data[3] if len(data) >= 4 else None
+            eligible = data[4] if len(data) >= 5 else None
+
+            self.server.channels.publish(uri, payload, exclude, eligible)
+
+    def on_open(self):
+        self.app.on_open()
+        self.do_handshake()
+
+    def on_message(self, message):
+        data = json.loads(message)
+
+        if not isinstance(data, list):
+            raise Exception('incoming data is not a list')
+
+        if data[0] == self.MSG_PREFIX and len(data) == 3:
+            prefix, uri = data[1:3]
+            self.prefixes.add(prefix, uri)
+
+        elif data[0] == self.MSG_CALL and len(data) >= 3:
+            return self.rpc_call(data)
+
+        elif data[0] in (self.MSG_SUBSCRIBE, self.MSG_UNSUBSCRIBE,
+                         self.MSG_PUBLISH):
+            return self.pubsub_action(data)
+        else:
+            raise Exception("Unknown call")
+
diff --git a/src/lib/geventwebsocket/resource.py b/src/lib/geventwebsocket/resource.py
new file mode 100644
index 00000000..549f0d32
--- /dev/null
+++ b/src/lib/geventwebsocket/resource.py
@@ -0,0 +1,100 @@
+import re
+import warnings
+
+from .protocols.base import BaseProtocol
+from .exceptions import WebSocketError
+
+try:
+    from collections import OrderedDict
+except ImportError:
+    class OrderedDict:
+        pass
+
+
+class WebSocketApplication(object):
+    protocol_class = BaseProtocol
+
+    def __init__(self, ws):
+        self.protocol = self.protocol_class(self)
+        self.ws = ws
+
+    def handle(self):
+        self.protocol.on_open()
+
+        while True:
+            try:
+                message = self.ws.receive()
+            except WebSocketError:
+                self.protocol.on_close()
+                break
+
+            self.protocol.on_message(message)
+
+    def on_open(self, *args, **kwargs):
+        pass
+
+    def on_close(self, *args, **kwargs):
+        pass
+
+    def on_message(self, message, *args, **kwargs):
+        self.ws.send(message, **kwargs)
+
+    @classmethod
+    def protocol_name(cls):
+        return cls.protocol_class.PROTOCOL_NAME
+
+
+class Resource(object):
+    def __init__(self, apps=None):
+        self.apps = apps if apps else []
+
+        if isinstance(apps, dict):
+            if not isinstance(apps, OrderedDict):
+                warnings.warn("Using an unordered dictionary for the "
+                              "app list is discouraged and may lead to "
+                              "undefined behavior.", UserWarning)
+
+            self.apps = apps.items()
+
+    # An app can either be a standard WSGI application (an object we call with
+    # __call__(self, environ, start_response)) or a class we instantiate
+    # (and which can handle websockets). This function tells them apart.
+    # Override this if you have apps that can handle websockets but don't
+    # fulfill these criteria.
+    def _is_websocket_app(self, app):
+        return isinstance(app, type) and issubclass(app, WebSocketApplication)
+
+    def _app_by_path(self, environ_path, is_websocket_request):
+        # Which app matched the current path?
+        for path, app in self.apps:
+            if re.match(path, environ_path):
+                if is_websocket_request == self._is_websocket_app(app):
+                    return app
+        return None
+
+    def app_protocol(self, path):
+        # app_protocol will only be called for websocket apps
+        app = self._app_by_path(path, True)
+
+        if hasattr(app, 'protocol_name'):
+            return app.protocol_name()
+        else:
+            return ''
+
+    def __call__(self, environ, start_response):
+        is_websocket_call = 'wsgi.websocket' in environ
+        current_app = self._app_by_path(environ['PATH_INFO'], is_websocket_call)
+
+        if current_app is None:
+            raise Exception("No apps defined")
+
+        if is_websocket_call:
+            ws = environ['wsgi.websocket']
+            current_app = current_app(ws)
+            current_app.ws = ws  # TODO: needed?
+            current_app.handle()
+            # Always return something, calling WSGI middleware may rely on it
+            return []
+        else:
+            return current_app(environ, start_response)
diff --git a/src/lib/geventwebsocket/server.py b/src/lib/geventwebsocket/server.py
new file mode 100644
index 00000000..e939bd11
--- /dev/null
+++ b/src/lib/geventwebsocket/server.py
@@ -0,0 +1,34 @@
+from gevent.pywsgi import WSGIServer
+
+from .handler import WebSocketHandler
+from .logging import create_logger
+
+
+class WebSocketServer(WSGIServer):
+    handler_class = WebSocketHandler
+    debug_log_format = (
+        '-' * 80 + '\n' +
+        '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
+        '%(message)s\n' +
+        '-' * 80
+    )
+
+    def __init__(self, *args, **kwargs):
+        self.debug = kwargs.pop('debug', False)
+        self.pre_start_hook = kwargs.pop('pre_start_hook', None)
+        self._logger = None
+        self.clients = {}
+
+        super(WebSocketServer, self).__init__(*args, **kwargs)
+
+    def handle(self, socket, address):
+        handler = self.handler_class(socket, address, self)
+        handler.handle()
+
+    @property
+    def logger(self):
+        if not self._logger:
+            self._logger = create_logger(
+                __name__, self.debug, self.debug_log_format)
+
+        return self._logger
diff --git a/src/lib/geventwebsocket/utf8validator.py b/src/lib/geventwebsocket/utf8validator.py
new file mode 100644
index 00000000..d604f966
--- /dev/null
+++ b/src/lib/geventwebsocket/utf8validator.py
@@ -0,0 +1,224 @@
+from ._compat import PY3
+
+###############################################################################
+#
+# The MIT License (MIT)
+#
+# Copyright (c) Crossbar.io Technologies GmbH
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
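Taken together, resource.py and server.py above form the public entry point of the vendored library. A minimal echo deployment might look like the following sketch (assuming the package is importable as geventwebsocket and gevent is installed; the path and port are illustrative):

    from collections import OrderedDict

    from geventwebsocket import WebSocketServer, WebSocketApplication, Resource

    class EchoApplication(WebSocketApplication):
        def on_message(self, message, *args, **kwargs):
            if message is not None:          # receive() yields None once closed
                self.ws.send(message)

    # Paths are regexes matched in order; an OrderedDict avoids the
    # "unordered dictionary" warning emitted by Resource.
    resource = Resource(OrderedDict([('^/echo', EchoApplication)]))

    if __name__ == '__main__':
        WebSocketServer(('127.0.0.1', 8000), resource).serve_forever()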
+# +############################################################################### + +# Note: This code is a Python implementation of the algorithm +# "Flexible and Economical UTF-8 Decoder" by Bjoern Hoehrmann +# bjoern@hoehrmann.de, http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + +__all__ = ("Utf8Validator",) + + +# DFA transitions +UTF8VALIDATOR_DFA = ( + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 00..1f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 20..3f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 40..5f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 60..7f + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, # 80..9f + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, # a0..bf + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, # c0..df + 0xa, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, # e0..ef + 0xb, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, # f0..ff + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, # s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, # s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, # s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, # s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, # s7..s8 +) + +UTF8_ACCEPT = 0 +UTF8_REJECT = 1 + + +# use Cython implementation of UTF8 validator if available +# +try: + from wsaccel.utf8validator import Utf8Validator + +except ImportError: + # + # Fallback to pure Python implementation - also for PyPy. + # + # Do NOT touch this code unless you know what you are doing! + # https://github.com/oberstet/scratchbox/tree/master/python/utf8 + # + + if PY3: + + # Python 3 and above + + # convert DFA table to bytes (performance) + UTF8VALIDATOR_DFA_S = bytes(UTF8VALIDATOR_DFA) + + class Utf8Validator(object): + """ + Incremental UTF-8 validator with constant memory consumption (minimal state). + + Implements the algorithm "Flexible and Economical UTF-8 Decoder" by + Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). + """ + + def __init__(self): + self.reset() + + def decode(self, b): + """ + Eat one UTF-8 octet, and validate on the fly. + + Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case + ``self.codepoint`` contains the decoded Unicode code point. + + Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered. + + Returns some other positive integer when more octets need to be eaten. + """ + tt = UTF8VALIDATOR_DFA_S[b] + if self.state != UTF8_ACCEPT: + self.codepoint = (b & 0x3f) | (self.codepoint << 6) + else: + self.codepoint = (0xff >> tt) & b + self.state = UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt] + return self.state + + def reset(self): + """ + Reset validator to start new incremental UTF-8 decode/validation. + """ + self.state = UTF8_ACCEPT # the empty string is valid UTF8 + self.codepoint = 0 + self.i = 0 + + def validate(self, ba): + """ + Incrementally validate a chunk of bytes provided as string. 
+ + Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``. + + As soon as an octet is encountered which renders the octet sequence + invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns + the index within the currently consumed chunk, and ``totalIndex`` the + index within the total consumed sequence that was the point of bail out. + When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the + total amount of consumed bytes. + """ + # + # The code here is written for optimal JITting in PyPy, not for best + # readability by your grandma or particular elegance. Do NOT touch! + # + l = len(ba) + i = 0 + state = self.state + while i < l: + # optimized version of decode(), since we are not interested in actual code points + state = UTF8VALIDATOR_DFA_S[256 + (state << 4) + UTF8VALIDATOR_DFA_S[ba[i]]] + if state == UTF8_REJECT: + self.state = state + self.i += i + return False, False, i, self.i + i += 1 + self.state = state + self.i += l + return True, state == UTF8_ACCEPT, l, self.i + + else: + + # convert DFA table to string (performance) + UTF8VALIDATOR_DFA_S = ''.join([chr(c) for c in UTF8VALIDATOR_DFA]) + + class Utf8Validator(object): + """ + Incremental UTF-8 validator with constant memory consumption (minimal state). + + Implements the algorithm "Flexible and Economical UTF-8 Decoder" by + Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). + """ + + def __init__(self): + self.reset() + + def decode(self, b): + """ + Eat one UTF-8 octet, and validate on the fly. + + Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case + ``self.codepoint`` contains the decoded Unicode code point. + + Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered. + + Returns some other positive integer when more octets need to be eaten. + """ + tt = ord(UTF8VALIDATOR_DFA_S[b]) + if self.state != UTF8_ACCEPT: + self.codepoint = (b & 0x3f) | (self.codepoint << 6) + else: + self.codepoint = (0xff >> tt) & b + self.state = ord(UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt]) + return self.state + + def reset(self): + """ + Reset validator to start new incremental UTF-8 decode/validation. + """ + self.state = UTF8_ACCEPT # the empty string is valid UTF8 + self.codepoint = 0 + self.i = 0 + + def validate(self, ba): + """ + Incrementally validate a chunk of bytes provided as string. + + Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``. + + As soon as an octet is encountered which renders the octet sequence + invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns + the index within the currently consumed chunk, and ``totalIndex`` the + index within the total consumed sequence that was the point of bail out. + When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the + total amount of consumed bytes. + """ + # + # The code here is written for optimal JITting in PyPy, not for best + # readability by your grandma or particular elegance. Do NOT touch! 
+            #
+            l = len(ba)
+            i = 0
+            state = self.state
+            while i < l:
+                # optimized version of decode(), since we are not interested in actual code points
+                state = ord(UTF8VALIDATOR_DFA_S[256 + (state << 4) + ord(UTF8VALIDATOR_DFA_S[ba[i]])])
+                if state == UTF8_REJECT:
+                    self.state = state
+                    self.i += i
+                    return False, False, i, self.i
+                i += 1
+            self.state = state
+            self.i += l
+            return True, state == UTF8_ACCEPT, l, self.i
diff --git a/src/lib/geventwebsocket/utils.py b/src/lib/geventwebsocket/utils.py
new file mode 100644
index 00000000..2e5bc3b7
--- /dev/null
+++ b/src/lib/geventwebsocket/utils.py
@@ -0,0 +1,45 @@
+import subprocess
+
+
+def get_version(version=None):
+    "Returns a PEP 386-compliant version number from VERSION."
+
+    if version is None:
+        from geventwebsocket import VERSION as version
+    else:
+        assert len(version) == 5
+        assert version[3] in ('alpha', 'beta', 'rc', 'final')
+
+    # Now build the two parts of the version number:
+    # main = X.Y[.Z]
+    # sub = .devN - for pre-alpha releases
+    #     | {a|b|c}N - for alpha, beta and rc releases
+
+    parts = 2 if version[2] == 0 else 3
+    main = '.'.join(str(x) for x in version[:parts])
+
+    sub = ''
+    if version[3] == 'alpha' and version[4] == 0:
+        hg_changeset = get_hg_changeset()
+        if hg_changeset:
+            sub = '.dev{0}'.format(hg_changeset)
+
+    elif version[3] != 'final':
+        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
+        sub = mapping[version[3]] + str(version[4])
+
+    return str(main + sub)
+
+
+def get_hg_changeset():
+    rev, err = subprocess.Popen(
+        'hg id -i',
+        shell=True,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE
+    ).communicate()
+
+    if err:
+        return None
+    else:
+        return rev.strip().replace('+', '')
diff --git a/src/lib/geventwebsocket/websocket.py b/src/lib/geventwebsocket/websocket.py
new file mode 100644
index 00000000..7aad7698
--- /dev/null
+++ b/src/lib/geventwebsocket/websocket.py
@@ -0,0 +1,570 @@
+import struct
+import socket
+
+from ._compat import string_types, range_type, text_type
+from .exceptions import ProtocolError
+from .exceptions import WebSocketError
+from .exceptions import FrameTooLargeException
+from .utf8validator import Utf8Validator
+
+
+MSG_SOCKET_DEAD = "Socket is dead"
+MSG_ALREADY_CLOSED = "Connection is already closed"
+MSG_CLOSED = "Connection closed"
+
+
+class WebSocket(object):
+    """
+    Base class for supporting websocket operations.
+
+    :ivar environ: The http environment referenced by this connection.
+    :ivar closed: Whether this connection is closed/closing.
+    :ivar stream: The underlying file like object that will be read from /
+        written to by this WebSocket object.
+    """
+
+    __slots__ = ('utf8validator', 'utf8validate_last', 'environ', 'closed',
+                 'stream', 'raw_write', 'raw_read', 'handler')
+
+    OPCODE_CONTINUATION = 0x00
+    OPCODE_TEXT = 0x01
+    OPCODE_BINARY = 0x02
+    OPCODE_CLOSE = 0x08
+    OPCODE_PING = 0x09
+    OPCODE_PONG = 0x0a
+
+    def __init__(self, environ, stream, handler):
+        self.environ = environ
+        self.closed = False
+
+        self.stream = stream
+
+        self.raw_write = stream.write
+        self.raw_read = stream.read
+
+        self.utf8validator = Utf8Validator()
+        self.handler = handler
+
+    def __del__(self):
+        try:
+            self.close()
+        except:
+            # close() may fail if __init__ didn't complete
+            pass
+
+    def _decode_bytes(self, bytestring):
+        """
+        Internal method used to convert the utf-8 encoded bytestring into
+        unicode.
+
+        If the conversion fails, the socket will be closed.
+        """
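The Utf8Validator above can be fed arbitrary chunk boundaries; a sketch of the quad it returns on the Python 3 path (standalone, illustrative values):

    v = Utf8Validator()

    ok, ends_on_cp, idx, total = v.validate(b'\xc3\xa9')   # U+00E9, complete
    assert ok and ends_on_cp

    ok, ends_on_cp, idx, total = v.validate(b'\xc3')       # half of a 2-byte sequence
    assert ok and not ends_on_cp                           # valid so far, mid code point

    ok, _, idx, _ = v.validate(b'\xff')                    # 0xff never occurs in UTF-8
    assert not ok and idx == 0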
+
+        if not bytestring:
+            return ''
+
+        try:
+            return bytestring.decode('utf-8')
+        except UnicodeDecodeError:
+            self.close(1007)
+
+            raise
+
+    def _encode_bytes(self, text):
+        """
+        :returns: The utf-8 byte string equivalent of `text`.
+        """
+
+        if not isinstance(text, str):
+            text = text_type(text or '')
+
+        return text.encode("utf-8")
+
+    def _is_valid_close_code(self, code):
+        """
+        :returns: Whether the returned close code is a valid hybi return code.
+        """
+        if code < 1000:
+            return False
+
+        if 1004 <= code <= 1006:
+            return False
+
+        if 1012 <= code <= 1016:
+            return False
+
+        if code == 1100:
+            # not sure about this one but the autobahn fuzzer requires it.
+            return False
+
+        if 2000 <= code <= 2999:
+            return False
+
+        return True
+
+    @property
+    def current_app(self):
+        if hasattr(self.handler.server.application, 'current_app'):
+            return self.handler.server.application.current_app
+        else:
+            # For backwards compatibility reasons
+            class MockApp():
+                def on_close(self, *args):
+                    pass
+
+            return MockApp()
+
+    @property
+    def origin(self):
+        if not self.environ:
+            return
+
+        return self.environ.get('HTTP_ORIGIN')
+
+    @property
+    def protocol(self):
+        if not self.environ:
+            return
+
+        return self.environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL')
+
+    @property
+    def version(self):
+        if not self.environ:
+            return
+
+        return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION')
+
+    @property
+    def path(self):
+        if not self.environ:
+            return
+
+        return self.environ.get('PATH_INFO')
+
+    @property
+    def logger(self):
+        return self.handler.logger
+
+    def handle_close(self, header, payload):
+        """
+        Called when a close frame has been decoded from the stream.
+
+        :param header: The decoded `Header`.
+        :param payload: The bytestring payload associated with the close frame.
+        """
+        if not payload:
+            self.close(1000, None)
+
+            return
+
+        if len(payload) < 2:
+            raise ProtocolError('Invalid close frame: {0} {1}'.format(
+                header, payload))
+
+        code = struct.unpack('!H', payload[:2])[0]
+        payload = payload[2:]
+
+        if payload:
+            validator = Utf8Validator()
+            val = validator.validate(payload)
+
+            if not val[0]:
+                raise UnicodeError
+
+        if not self._is_valid_close_code(code):
+            raise ProtocolError('Invalid close code {0}'.format(code))
+
+        self.close(code, payload)
+
+    def handle_ping(self, header, payload):
+        self.send_frame(payload, self.OPCODE_PONG)
+
+    def handle_pong(self, header, payload):
+        pass
+
+    def read_frame(self):
+        """
+        Block until a full frame has been read from the socket.
+
+        This is an internal method as calling this will not cleanup correctly
+        if an exception is raised. Use `receive` instead.
+
+        :return: The header and payload as a tuple.
+        """
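For reference, handle_close() above expects the close payload to carry a big-endian status code, optionally followed by a UTF-8 reason; a small sketch of that layout:

    import struct

    payload = struct.pack('!H', 1000) + u'bye'.encode('utf-8')
    code = struct.unpack('!H', payload[:2])[0]

    assert code == 1000 and payload[2:] == b'bye'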
+
+        header = Header.decode_header(self.stream)
+
+        if header.flags:
+            raise ProtocolError
+
+        if not header.length:
+            return header, b''
+
+        try:
+            payload = self.raw_read(header.length)
+        except socket.error:
+            payload = b''
+        except Exception:
+            # TODO log out this exception
+            payload = b''
+
+        if len(payload) != header.length:
+            raise WebSocketError('Unexpected EOF reading frame payload')
+
+        if header.mask:
+            payload = header.unmask_payload(payload)
+
+        return header, payload
+
+    def validate_utf8(self, payload):
+        # Make sure the frames are decodable independently
+        self.utf8validate_last = self.utf8validator.validate(payload)
+
+        if not self.utf8validate_last[0]:
+            raise UnicodeError("Encountered invalid UTF-8 while processing "
+                               "text message at payload octet index "
+                               "{0:d}".format(self.utf8validate_last[3]))
+
+    def read_message(self):
+        """
+        Return the next text or binary message from the socket.
+
+        This is an internal method as calling this will not cleanup correctly
+        if an exception is raised. Use `receive` instead.
+        """
+        opcode = None
+        message = bytearray()
+
+        while True:
+            header, payload = self.read_frame()
+            f_opcode = header.opcode
+
+            if f_opcode in (self.OPCODE_TEXT, self.OPCODE_BINARY):
+                # a new frame
+                if opcode:
+                    raise ProtocolError("The opcode in non-fin frame is "
+                                        "expected to be zero, got "
+                                        "{0!r}".format(f_opcode))
+
+                # Start reading a new message, reset the validator
+                self.utf8validator.reset()
+                self.utf8validate_last = (True, True, 0, 0)
+
+                opcode = f_opcode
+
+            elif f_opcode == self.OPCODE_CONTINUATION:
+                if not opcode:
+                    raise ProtocolError("Unexpected frame with opcode=0")
+
+            elif f_opcode == self.OPCODE_PING:
+                self.handle_ping(header, payload)
+                continue
+
+            elif f_opcode == self.OPCODE_PONG:
+                self.handle_pong(header, payload)
+                continue
+
+            elif f_opcode == self.OPCODE_CLOSE:
+                self.handle_close(header, payload)
+                return
+
+            else:
+                raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode))
+
+            if opcode == self.OPCODE_TEXT:
+                self.validate_utf8(payload)
+
+            message += payload
+
+            if header.fin:
+                break
+
+        if opcode == self.OPCODE_TEXT:
+            self.validate_utf8(message)
+            return self._decode_bytes(message)
+        else:
+            return message
+
+    def receive(self):
+        """
+        Read and return a message from the stream. If `None` is returned, then
+        the socket is considered closed/errored.
+        """
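A typical consumer loop against the receive() contract above might look like this sketch (ws stands for an already-established WebSocket instance):

    while True:
        message = ws.receive()
        if message is None:      # peer closed, or the socket errored out
            break
        print(message)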
+
+        if self.closed:
+            self.current_app.on_close(MSG_ALREADY_CLOSED)
+            raise WebSocketError(MSG_ALREADY_CLOSED)
+
+        try:
+            return self.read_message()
+        except UnicodeError:
+            self.close(1007)
+        except ProtocolError:
+            self.close(1002)
+        except socket.timeout:
+            self.close()
+            self.current_app.on_close(MSG_CLOSED)
+        except socket.error:
+            self.close()
+            self.current_app.on_close(MSG_CLOSED)
+
+        return None
+
+    def send_frame(self, message, opcode):
+        """
+        Send a frame over the websocket with message as its payload
+        """
+        if self.closed:
+            self.current_app.on_close(MSG_ALREADY_CLOSED)
+            raise WebSocketError(MSG_ALREADY_CLOSED)
+
+        if not message:
+            return
+
+        if opcode in (self.OPCODE_TEXT, self.OPCODE_PING):
+            message = self._encode_bytes(message)
+        elif opcode == self.OPCODE_BINARY:
+            message = bytes(message)
+
+        header = Header.encode_header(True, opcode, b'', len(message), 0)
+
+        try:
+            self.raw_write(header + message)
+        except socket.error:
+            raise WebSocketError(MSG_SOCKET_DEAD)
+        except:
+            raise
+
+    def send(self, message, binary=None):
+        """
+        Send a frame over the websocket with message as its payload
+        """
+        if binary is None:
+            binary = not isinstance(message, string_types)
+
+        opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT
+
+        try:
+            self.send_frame(message, opcode)
+        except WebSocketError:
+            self.current_app.on_close(MSG_SOCKET_DEAD)
+            raise WebSocketError(MSG_SOCKET_DEAD)
+
+    def close(self, code=1000, message=b''):
+        """
+        Close the websocket and connection, sending the specified code and
+        message. The underlying socket object is _not_ closed, that is the
+        responsibility of the initiator.
+        """
+
+        if self.closed:
+            self.current_app.on_close(MSG_ALREADY_CLOSED)
+
+        try:
+            message = self._encode_bytes(message)
+
+            self.send_frame(message, opcode=self.OPCODE_CLOSE)
+        except WebSocketError:
+            # Failed to write the closing frame but it's ok because we're
+            # closing the socket anyway.
+            self.logger.debug("Failed to write closing frame -> closing socket")
+        finally:
+            self.logger.debug("Closed WebSocket")
+            self.closed = True
+
+            self.stream = None
+            self.raw_write = None
+            self.raw_read = None
+
+            self.environ = None
+
+            #self.current_app.on_close(MSG_ALREADY_CLOSED)
+
+
+class Stream(object):
+    """
+    Wraps the handler's socket/rfile attributes and makes it into a file like
+    object that can be read from/written to by the lower level websocket api.
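Note how send() above infers the opcode from the payload type unless binary is forced; on the Python 3 path that gives (illustrative sketch, ws established as before):

    ws.send(u'hello')              # str -> OPCODE_TEXT
    ws.send(b'\x00\x01\x02')       # bytes are not string_types -> OPCODE_BINARY
    ws.send('hello', binary=True)  # explicit override wins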
+ """ + + __slots__ = ('handler', 'read', 'write') + + def __init__(self, handler): + self.handler = handler + self.read = handler.rfile.read + self.write = handler.socket.sendall + + +class Header(object): + __slots__ = ('fin', 'mask', 'opcode', 'flags', 'length') + + FIN_MASK = 0x80 + OPCODE_MASK = 0x0f + MASK_MASK = 0x80 + LENGTH_MASK = 0x7f + + RSV0_MASK = 0x40 + RSV1_MASK = 0x20 + RSV2_MASK = 0x10 + + # bitwise mask that will determine the reserved bits for a frame header + HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK + + def __init__(self, fin=0, opcode=0, flags=0, length=0): + self.mask = '' + self.fin = fin + self.opcode = opcode + self.flags = flags + self.length = length + + def mask_payload(self, payload): + payload = bytearray(payload) + mask = bytearray(self.mask) + + for i in range_type(self.length): + payload[i] ^= mask[i % 4] + + return payload + + # it's the same operation + unmask_payload = mask_payload + + def __repr__(self): + opcodes = { + 0: 'continuation(0)', + 1: 'text(1)', + 2: 'binary(2)', + 8: 'close(8)', + 9: 'ping(9)', + 10: 'pong(10)' + } + flags = { + 0x40: 'RSV1 MASK', + 0x20: 'RSV2 MASK', + 0x10: 'RSV3 MASK' + } + + return ("
    ").format( + self.fin, + opcodes.get(self.opcode, 'reserved({})'.format(self.opcode)), + self.length, + flags.get(self.flags, 'reserved({})'.format(self.flags)), + self.mask, id(self) + ) + + @classmethod + def decode_header(cls, stream): + """ + Decode a WebSocket header. + + :param stream: A file like object that can be 'read' from. + :returns: A `Header` instance. + """ + read = stream.read + data = read(2) + + if len(data) != 2: + raise WebSocketError("Unexpected EOF while decoding header") + + first_byte, second_byte = struct.unpack('!BB', data) + + header = cls( + fin=first_byte & cls.FIN_MASK == cls.FIN_MASK, + opcode=first_byte & cls.OPCODE_MASK, + flags=first_byte & cls.HEADER_FLAG_MASK, + length=second_byte & cls.LENGTH_MASK) + + has_mask = second_byte & cls.MASK_MASK == cls.MASK_MASK + + if header.opcode > 0x07: + if not header.fin: + raise ProtocolError( + "Received fragmented control frame: {0!r}".format(data)) + + # Control frames MUST have a payload length of 125 bytes or less + if header.length > 125: + raise FrameTooLargeException( + "Control frame cannot be larger than 125 bytes: " + "{0!r}".format(data)) + + if header.length == 126: + # 16 bit length + data = read(2) + + if len(data) != 2: + raise WebSocketError('Unexpected EOF while decoding header') + + header.length = struct.unpack('!H', data)[0] + elif header.length == 127: + # 64 bit length + data = read(8) + + if len(data) != 8: + raise WebSocketError('Unexpected EOF while decoding header') + + header.length = struct.unpack('!Q', data)[0] + + if has_mask: + mask = read(4) + + if len(mask) != 4: + raise WebSocketError('Unexpected EOF while decoding header') + + header.mask = mask + + return header + + @classmethod + def encode_header(cls, fin, opcode, mask, length, flags): + """ + Encodes a WebSocket header. + + :param fin: Whether this is the final frame for this opcode. + :param opcode: The opcode of the payload, see `OPCODE_*` + :param mask: Whether the payload is masked. + :param length: The length of the frame. + :param flags: The RSV* flags. + :return: A bytestring encoded header. 
+ """ + first_byte = opcode + second_byte = 0 + extra = b"" + result = bytearray() + + if fin: + first_byte |= cls.FIN_MASK + + if flags & cls.RSV0_MASK: + first_byte |= cls.RSV0_MASK + + if flags & cls.RSV1_MASK: + first_byte |= cls.RSV1_MASK + + if flags & cls.RSV2_MASK: + first_byte |= cls.RSV2_MASK + + # now deal with length complexities + if length < 126: + second_byte += length + elif length <= 0xffff: + second_byte += 126 + extra = struct.pack('!H', length) + elif length <= 0xffffffffffffffff: + second_byte += 127 + extra = struct.pack('!Q', length) + else: + raise FrameTooLargeException + + if mask: + second_byte |= cls.MASK_MASK + + result.append(first_byte) + result.append(second_byte) + result.extend(extra) + + if mask: + result.extend(mask) + + return result diff --git a/src/lib/libsecp256k1message/__init__.py b/src/lib/libsecp256k1message/__init__.py deleted file mode 100644 index 753f384e..00000000 --- a/src/lib/libsecp256k1message/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .libsecp256k1message import * \ No newline at end of file diff --git a/src/lib/libsecp256k1message/libsecp256k1message.py b/src/lib/libsecp256k1message/libsecp256k1message.py deleted file mode 100644 index 59768b88..00000000 --- a/src/lib/libsecp256k1message/libsecp256k1message.py +++ /dev/null @@ -1,162 +0,0 @@ -import hashlib -import base64 -from coincurve import PrivateKey, PublicKey -from base58 import b58encode_check, b58decode_check -from hmac import compare_digest -from util.Electrum import format as zero_format - -RECID_MIN = 0 -RECID_MAX = 3 -RECID_UNCOMPR = 27 -LEN_COMPACT_SIG = 65 - -class SignatureError(ValueError): - pass - -def bitcoin_address(): - """Generate a public address and a secret address.""" - publickey, secretkey = key_pair() - - public_address = compute_public_address(publickey) - secret_address = compute_secret_address(secretkey) - - return (public_address, secret_address) - -def key_pair(): - """Generate a public key and a secret key.""" - secretkey = PrivateKey() - publickey = PublicKey.from_secret(secretkey.secret) - return (publickey, secretkey) - -def compute_public_address(publickey, compressed=False): - """Convert a public key to a public Bitcoin address.""" - public_plain = b'\x00' + public_digest(publickey, compressed=compressed) - return b58encode_check(public_plain) - -def compute_secret_address(secretkey): - """Convert a secret key to a secret Bitcoin address.""" - secret_plain = b'\x80' + secretkey.secret - return b58encode_check(secret_plain) - -def public_digest(publickey, compressed=False): - """Convert a public key to ripemd160(sha256()) digest.""" - publickey_hex = publickey.format(compressed=compressed) - return hashlib.new('ripemd160', hashlib.sha256(publickey_hex).digest()).digest() - -def address_public_digest(address): - """Convert a public Bitcoin address to ripemd160(sha256()) digest.""" - public_plain = b58decode_check(address) - if not public_plain.startswith(b'\x00') or len(public_plain) != 21: - raise ValueError('Invalid public key digest') - return public_plain[1:] - -def _decode_bitcoin_secret(address): - secret_plain = b58decode_check(address) - if not secret_plain.startswith(b'\x80') or len(secret_plain) != 33: - raise ValueError('Invalid secret key. Uncompressed keys only.') - return secret_plain[1:] - -def recover_public_key(signature, message): - """Recover public key from signature and message. 
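The deleted helpers above wrap the classic base58check layouts: 0x00 plus a hash160 digest for public addresses, 0x80 plus the raw secret for WIF. A sketch of the address side using the same base58 dependency (the key bytes are placeholders, not a valid curve point; ripemd160 availability depends on the local OpenSSL build):

    import hashlib
    from base58 import b58encode_check

    pubkey_bytes = b'\x04' + b'\x11' * 64     # placeholder uncompressed SEC key
    digest = hashlib.new('ripemd160', hashlib.sha256(pubkey_bytes).digest()).digest()
    address = b58encode_check(b'\x00' + digest)   # 0x00 prefix -> address starts with '1'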
- Recovered public key guarantees a correct signature""" - return PublicKey.from_signature_and_message(signature, message) - -def decode_secret_key(address): - """Convert a secret Bitcoin address to a secret key.""" - return PrivateKey(_decode_bitcoin_secret(address)) - - -def coincurve_sig(electrum_signature): - # coincurve := r + s + recovery_id - # where (0 <= recovery_id <= 3) - # https://github.com/bitcoin-core/secp256k1/blob/0b7024185045a49a1a6a4c5615bf31c94f63d9c4/src/modules/recovery/main_impl.h#L35 - if len(electrum_signature) != LEN_COMPACT_SIG: - raise ValueError('Not a 65-byte compact signature.') - # Compute coincurve recid - recid = (electrum_signature[0] - 27) & 3 - if not (RECID_MIN <= recid <= RECID_MAX): - raise ValueError('Recovery ID %d is not supported.' % recid) - recid_byte = int.to_bytes(recid, length=1, byteorder='big') - return electrum_signature[1:] + recid_byte - - -def electrum_sig(coincurve_signature): - # electrum := recovery_id + r + s - # where (27 <= recovery_id <= 30) - # https://github.com/scintill/bitcoin-signature-tools/blob/ed3f5be5045af74a54c92d3648de98c329d9b4f7/key.cpp#L285 - if len(coincurve_signature) != LEN_COMPACT_SIG: - raise ValueError('Not a 65-byte compact signature.') - # Compute Electrum recid - recid = coincurve_signature[-1] + RECID_UNCOMPR - if not (RECID_UNCOMPR + RECID_MIN <= recid <= RECID_UNCOMPR + RECID_MAX): - raise ValueError('Recovery ID %d is not supported.' % recid) - recid_byte = int.to_bytes(recid, length=1, byteorder='big') - return recid_byte + coincurve_signature[0:-1] - -def sign_data(secretkey, byte_string): - """Sign [byte_string] with [secretkey]. - Return serialized signature compatible with Electrum (ZeroNet).""" - # encode the message - encoded = zero_format(byte_string) - # sign the message and get a coincurve signature - signature = secretkey.sign_recoverable(encoded) - # reserialize signature and return it - return electrum_sig(signature) - -def verify_data(key_digest, electrum_signature, byte_string): - """Verify if [electrum_signature] of [byte_string] is correctly signed and - is signed with the secret counterpart of [key_digest]. 
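The recid shuffling between the two signature layouts above is easy to get wrong, so a round-trip sanity check helps (sketch, Python 3 byte semantics, fake signature bytes):

    #   electrum:  [27 + recid] + r(32) + s(32)
    #   coincurve: r(32) + s(32) + [recid]
    sig_electrum = bytes([27 + 1]) + b'\x11' * 64     # fake signature, recid=1
    sig_coincurve = coincurve_sig(sig_electrum)

    assert sig_coincurve == b'\x11' * 64 + b'\x01'
    assert electrum_sig(sig_coincurve) == sig_electrum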
- Raise SignatureError if the signature is forged or otherwise problematic.""" - # reserialize signature - signature = coincurve_sig(electrum_signature) - # encode the message - encoded = zero_format(byte_string) - # recover full public key from signature - # "which guarantees a correct signature" - publickey = recover_public_key(signature, encoded) - - # verify that the message is correctly signed by the public key - # correct_sig = verify_sig(publickey, signature, encoded) - - # verify that the public key is what we expect - correct_key = verify_key(publickey, key_digest) - - if not correct_key: - raise SignatureError('Signature is forged!') - -def verify_sig(publickey, signature, byte_string): - return publickey.verify(signature, byte_string) - -def verify_key(publickey, key_digest): - return compare_digest(key_digest, public_digest(publickey)) - -def recover_address(data, sign): - sign_bytes = base64.b64decode(sign) - is_compressed = ((sign_bytes[0] - 27) & 4) != 0 - publickey = recover_public_key(coincurve_sig(sign_bytes), zero_format(data)) - return compute_public_address(publickey, compressed=is_compressed) - -__all__ = [ - 'SignatureError', - 'key_pair', 'compute_public_address', 'compute_secret_address', - 'public_digest', 'address_public_digest', 'recover_public_key', 'decode_secret_key', - 'sign_data', 'verify_data', "recover_address" -] - -if __name__ == "__main__": - import base64, time, multiprocessing - s = time.time() - privatekey = decode_secret_key(b"5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk") - threads = [] - for i in range(1000): - data = bytes("hello", "utf8") - address = recover_address(data, "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=") - print("- Verify x10000: %.3fs %s" % (time.time() - s, address)) - - s = time.time() - for i in range(1000): - privatekey = decode_secret_key(b"5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk") - sign = sign_data(privatekey, b"hello") - sign_b64 = base64.b64encode(sign) - - print("- Sign x1000: %.3fs" % (time.time() - s)) diff --git a/src/lib/opensslVerify/HashInfo.txt b/src/lib/opensslVerify/HashInfo.txt new file mode 100644 index 00000000..58c6257f Binary files /dev/null and b/src/lib/opensslVerify/HashInfo.txt differ diff --git a/src/lib/opensslVerify/OpenSSL License.txt b/src/lib/opensslVerify/OpenSSL License.txt new file mode 100644 index 00000000..3090896c --- /dev/null +++ b/src/lib/opensslVerify/OpenSSL License.txt @@ -0,0 +1,126 @@ + + LICENSE ISSUES + ============== + + The OpenSSL toolkit stays under a dual license, i.e. both the conditions of + the OpenSSL License and the original SSLeay license apply to the toolkit. + See below for the actual license texts. Actually both licenses are BSD-style + Open Source licenses. In case of any license issues related to OpenSSL + please contact openssl-core@openssl.org. + + OpenSSL License + --------------- + +/* ==================================================================== + * Copyright (c) 1998-2016 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * openssl-core@openssl.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.openssl.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * ==================================================================== + * + * This product includes cryptographic software written by Eric Young + * (eay@cryptsoft.com). This product includes software written by Tim + * Hudson (tjh@cryptsoft.com). + * + */ + + Original SSLeay License + ----------------------- + +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. 
Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ diff --git a/src/lib/opensslVerify/ReadMe.txt b/src/lib/opensslVerify/ReadMe.txt new file mode 100644 index 00000000..6480f3d5 --- /dev/null +++ b/src/lib/opensslVerify/ReadMe.txt @@ -0,0 +1,59 @@ +============================================================================= +OpenSSL v1.0.2k Precompiled Binaries for Win32 +----------------------------------------------------------------------------- + + *** Release Information *** + +Release Date: Jan 26, 2017 + +Author: Frederik A. Winkelsdorf (opendec.wordpress.com) + for the Indy Project (www.indyproject.org) + +Requirements: Indy 10.5.5+ (SVN Version or Delphi 2009 and newer) + +Dependencies: The libraries have no noteworthy dependencies + +Installation: Copy both DLL files into your application directory + +Supported OS: Windows 2000 up to Windows 10 + +----------------------------------------------------------------------------- + + *** Legal Disclaimer *** + +THIS SOFTWARE IS PROVIDED BY ITS AUTHOR AND THE INDY PROJECT "AS IS" AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +OpenSSL license terms are provided in the file "OpenSSL License.txt". + +PLEASE CHECK IF YOU NEED TO COMPLY WITH EXPORT RESTRICTIONS FOR CRYPTOGRAPHIC +SOFTWARE AND/OR PATENTS. + +----------------------------------------------------------------------------- + + *** Build Information Win32 *** + +Built with: Microsoft Visual C++ 2008 Express Edition + The Netwide Assembler (NASM) v2.11.08 Win32 + Strawberry Perl v5.22.0.1 Win32 Portable + Windows PowerShell + FinalBuilder 7 + +Commands: perl configure VC-WIN32 + ms\do_nasm + adjusted ms\ntdll.mak (replaced "/MD" with "/MT") + adjusted ms\version32.rc (Indy Information inserted) + nmake -f ms\ntdll.mak + nmake -f ms\ntdll.mak test + editbin.exe /rebase:base=0x11000000 libeay32.dll + editbin.exe /rebase:base=0x12000000 ssleay32.dll + +============================================================================= \ No newline at end of file diff --git a/src/lib/opensslVerify/__init__.py b/src/lib/opensslVerify/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/lib/opensslVerify/gencert.cmd b/src/lib/opensslVerify/gencert.cmd new file mode 100644 index 00000000..105ba6a0 --- /dev/null +++ b/src/lib/opensslVerify/gencert.cmd @@ -0,0 +1,10 @@ +openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -nodes -config openssl.cnf +REM openssl ecparam -name secp521r1 -genkey -param_enc explicit -out key-ecc.pem -config openssl.cnf + +openssl ecparam -name secp256r1 -genkey -out key-ecc.pem +openssl req -new -key key-ecc.pem -x509 -nodes -out cert-ecc.pem -config openssl.cnf + +@echo off +REM openssl ecparam -genkey -name prime256v1 -out key.pem +REM openssl req -new -key key.pem -out csr.pem +REM openssl req -x509 -days 365 -key key.pem -in csr.pem -out certificate.pem \ No newline at end of file diff --git a/src/lib/opensslVerify/libeay32.dll b/src/lib/opensslVerify/libeay32.dll new file mode 100644 index 00000000..3bfa0753 Binary files /dev/null and b/src/lib/opensslVerify/libeay32.dll differ diff --git a/src/lib/opensslVerify/license.txt b/src/lib/opensslVerify/license.txt new file mode 100644 index 00000000..fb03713d --- /dev/null +++ b/src/lib/opensslVerify/license.txt @@ -0,0 +1,127 @@ + + LICENSE ISSUES + ============== + + The OpenSSL toolkit stays under a dual license, i.e. both the conditions of + the OpenSSL License and the original SSLeay license apply to the toolkit. + See below for the actual license texts. Actually both licenses are BSD-style + Open Source licenses. In case of any license issues related to OpenSSL + please contact openssl-core@openssl.org. + + OpenSSL License + --------------- + +/* ==================================================================== + * Copyright (c) 1998-2016 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * openssl-core@openssl.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.openssl.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * ==================================================================== + * + * This product includes cryptographic software written by Eric Young + * (eay@cryptsoft.com). This product includes software written by Tim + * Hudson (tjh@cryptsoft.com). + * + */ + + Original SSLeay License + ----------------------- + +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + diff --git a/src/lib/openssl/openssl.cnf b/src/lib/opensslVerify/openssl.cnf similarity index 78% rename from src/lib/openssl/openssl.cnf rename to src/lib/opensslVerify/openssl.cnf index 1c1ec47f..a2c48ba1 100644 --- a/src/lib/openssl/openssl.cnf +++ b/src/lib/opensslVerify/openssl.cnf @@ -1,4 +1,5 @@ [ req ] +prompt = no default_bits = 2048 default_keyfile = server-key.pem distinguished_name = subject @@ -16,7 +17,7 @@ organizationName = Example, LLC # Use a friendly name here because its presented to the user. The server's DNS # names are placed in Subject Alternate Names. Plus, DNS names here is deprecated -# by both IETF and CA/Browser Forums. If you place a DNS name here, then you +# by both IETF and CA/Browser Forums. If you place a DNS name here, then you # must include the DNS name in the SAN too (otherwise, Chrome and others that # strictly follow the CA/Browser Baseline Requirements will fail). 
commonName = Example Company @@ -31,8 +32,8 @@ authorityKeyIdentifier = keyid,issuer basicConstraints = CA:FALSE keyUsage = digitalSignature, keyEncipherment -extendedKeyUsage = clientAuth, serverAuth subjectAltName = @alternate_names +nsComment = "OpenSSL Generated Certificate" # RFC 5280, Section 4.2.1.12 makes EKU optional # CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused @@ -45,8 +46,8 @@ subjectKeyIdentifier = hash basicConstraints = CA:FALSE keyUsage = digitalSignature, keyEncipherment -extendedKeyUsage = clientAuth, serverAuth subjectAltName = @alternate_names +nsComment = "OpenSSL Generated Certificate" # RFC 5280, Section 4.2.1.12 makes EKU optional # CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused @@ -54,5 +55,16 @@ subjectAltName = @alternate_names [ alternate_names ] -DNS.1 = $ENV::CN -DNS.2 = www.$ENV::CN \ No newline at end of file +DNS.1 = example.com +DNS.2 = www.example.com +DNS.3 = mail.example.com +DNS.4 = ftp.example.com + +# Add these if you need them. But usually you don't want them or +# need them in production. You may need them for development. +# DNS.5 = localhost +# DNS.6 = localhost.localdomain +# DNS.7 = 127.0.0.1 + +# IPv6 localhost +# DNS.8 = ::1 \ No newline at end of file diff --git a/src/lib/opensslVerify/openssl.exe b/src/lib/opensslVerify/openssl.exe new file mode 100644 index 00000000..fbc7561f Binary files /dev/null and b/src/lib/opensslVerify/openssl.exe differ diff --git a/src/lib/opensslVerify/opensslVerify.py b/src/lib/opensslVerify/opensslVerify.py new file mode 100644 index 00000000..d0eede54 --- /dev/null +++ b/src/lib/opensslVerify/opensslVerify.py @@ -0,0 +1,464 @@ +# via http://pastebin.com/H1XikJFd +# -*- Mode: Python -*- + +# This is a combination of http://pastebin.com/bQtdDzHx and +# https://github.com/Bitmessage/PyBitmessage/blob/master/src/pyelliptic/openssl.py +# that doesn't crash on OSX. 
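The wrapper below declares restype and argtypes for every symbol it binds; ctypes otherwise assumes int return values, which truncates 64-bit pointers. A standalone illustration of the same pattern against libc (sketch, POSIX only):

    import ctypes
    import ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library('c') or 'libc.so.6')

    libc.malloc.restype = ctypes.c_void_p    # declare before the first call
    libc.malloc.argtypes = [ctypes.c_size_t]
    libc.free.argtypes = [ctypes.c_void_p]

    p = libc.malloc(16)
    libc.free(p)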
+# Long message bug fixed by ZeroNet + +import ctypes +import ctypes.util +import _ctypes +import hashlib +import base64 +import time +import logging +import sys +import os + +addrtype = 0 + + +class _OpenSSL: + + """ + Wrapper for OpenSSL using ctypes + """ + + def __init__(self, library): + self.time_opened = time.time() + """ + Build the wrapper + """ + try: + self._lib = ctypes.CDLL(library) + except: + self._lib = ctypes.cdll.LoadLibrary(library) + + self.pointer = ctypes.pointer + self.c_int = ctypes.c_int + self.byref = ctypes.byref + self.create_string_buffer = ctypes.create_string_buffer + + self.BN_new = self._lib.BN_new + self.BN_new.restype = ctypes.c_void_p + self.BN_new.argtypes = [] + + self.BN_copy = self._lib.BN_copy + self.BN_copy.restype = ctypes.c_void_p + self.BN_copy.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_mul_word = self._lib.BN_mul_word + self.BN_mul_word.restype = ctypes.c_int + self.BN_mul_word.argtypes = [ctypes.c_void_p, ctypes.c_int] + + self.BN_set_word = self._lib.BN_set_word + self.BN_set_word.restype = ctypes.c_int + self.BN_set_word.argtypes = [ctypes.c_void_p, ctypes.c_int] + + self.BN_add = self._lib.BN_add + self.BN_add.restype = ctypes.c_void_p + self.BN_add.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_mod_sub = self._lib.BN_mod_sub + self.BN_mod_sub.restype = ctypes.c_int + self.BN_mod_sub.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_mod_mul = self._lib.BN_mod_mul + self.BN_mod_mul.restype = ctypes.c_int + self.BN_mod_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_mod_inverse = self._lib.BN_mod_inverse + self.BN_mod_inverse.restype = ctypes.c_void_p + self.BN_mod_inverse.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.BN_cmp = self._lib.BN_cmp + self.BN_cmp.restype = ctypes.c_int + self.BN_cmp.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_bn2bin = self._lib.BN_bn2bin + self.BN_bn2bin.restype = ctypes.c_int + self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_bin2bn = self._lib.BN_bin2bn + self.BN_bin2bn.restype = ctypes.c_void_p + self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p] + + self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name + self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p + self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int] + + self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group + self.EC_KEY_get0_group.restype = ctypes.c_void_p + self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key + self.EC_KEY_set_public_key.restype = ctypes.c_int + self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_POINT_set_compressed_coordinates_GFp = self._lib.EC_POINT_set_compressed_coordinates_GFp + self.EC_POINT_set_compressed_coordinates_GFp.restype = ctypes.c_int + self.EC_POINT_set_compressed_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] + + self.EC_POINT_new = self._lib.EC_POINT_new + self.EC_POINT_new.restype = ctypes.c_void_p + self.EC_POINT_new.argtypes = [ctypes.c_void_p] + + 
self.EC_POINT_free = self._lib.EC_POINT_free + self.EC_POINT_free.restype = None + self.EC_POINT_free.argtypes = [ctypes.c_void_p] + + self.EC_GROUP_get_order = self._lib.EC_GROUP_get_order + self.EC_GROUP_get_order.restype = ctypes.c_void_p + self.EC_GROUP_get_order.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_GROUP_get_degree = self._lib.EC_GROUP_get_degree + self.EC_GROUP_get_degree.restype = ctypes.c_void_p + self.EC_GROUP_get_degree.argtypes = [ctypes.c_void_p] + + self.EC_GROUP_get_curve_GFp = self._lib.EC_GROUP_get_curve_GFp + self.EC_GROUP_get_curve_GFp.restype = ctypes.c_void_p + self.EC_GROUP_get_curve_GFp.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_POINT_mul = self._lib.EC_POINT_mul + self.EC_POINT_mul.restype = ctypes.c_int + self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_conv_form = self._lib.EC_KEY_set_conv_form + self.EC_KEY_set_conv_form.restype = None + self.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p, + ctypes.c_int] + + self.BN_CTX_new = self._lib.BN_CTX_new + self._lib.BN_CTX_new.restype = ctypes.c_void_p + self._lib.BN_CTX_new.argtypes = [] + + self.BN_CTX_start = self._lib.BN_CTX_start + self._lib.BN_CTX_start.restype = ctypes.c_void_p + self._lib.BN_CTX_start.argtypes = [ctypes.c_void_p] + + self.BN_CTX_get = self._lib.BN_CTX_get + self._lib.BN_CTX_get.restype = ctypes.c_void_p + self._lib.BN_CTX_get.argtypes = [ctypes.c_void_p] + + self.ECDSA_sign = self._lib.ECDSA_sign + self.ECDSA_sign.restype = ctypes.c_int + self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.ECDSA_verify = self._lib.ECDSA_verify + self.ECDSA_verify.restype = ctypes.c_int + self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] + + self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey + self.i2o_ECPublicKey.restype = ctypes.c_void_p + self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_CTX_free = self._lib.BN_CTX_free + self.BN_CTX_free.restype = None + self.BN_CTX_free.argtypes = [ctypes.c_void_p] + + self.EC_POINT_free = self._lib.EC_POINT_free + self.EC_POINT_free.restype = None + self.EC_POINT_free.argtypes = [ctypes.c_void_p] + +ssl = None + +def openLibrary(): + global ssl + try: + if sys.platform.startswith("win"): + dll_path = os.path.dirname(os.path.abspath(__file__)) + "/" + "libeay32.dll" + elif sys.platform == "cygwin": + dll_path = "/bin/cygcrypto-1.0.0.dll" + elif os.path.isfile("../lib/libcrypto.so"): # ZeroBundle OSX + dll_path = "../lib/libcrypto.so" + elif os.path.isfile("/opt/lib/libcrypto.so.1.0.0"): # For optware and entware + dll_path = "/opt/lib/libcrypto.so.1.0.0" + else: + dll_path = "/usr/local/ssl/lib/libcrypto.so" + ssl = _OpenSSL(dll_path) + assert ssl + except Exception, err: + ssl = _OpenSSL(ctypes.util.find_library('ssl.so.1.0') or ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32') + logging.debug("opensslVerify loaded: %s", ssl._lib) + +openLibrary() +openssl_version = "%.9X" % ssl._lib.SSLeay() + 
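+# SSLeay() returns the library version packed as 0xMNNFFPPS (major, minor,
+# fix, patch, status nibbles); e.g. 0x1000105F is OpenSSL 1.0.1e. The "%.9X"
+# above only renders that packed value for log output and error messages.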
+NID_secp256k1 = 714 + + +def check_result(val, func, args): + if val == 0: + raise ValueError + else: + return ctypes.c_void_p(val) + +ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p +ssl.EC_KEY_new_by_curve_name.errcheck = check_result + +POINT_CONVERSION_COMPRESSED = 2 +POINT_CONVERSION_UNCOMPRESSED = 4 + +__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' +__b58base = len(__b58chars) + + +def b58encode(v): + """ encode v, which is a string of bytes, to base58. + """ + + long_value = 0L + for (i, c) in enumerate(v[::-1]): + long_value += (256 ** i) * ord(c) + + result = '' + while long_value >= __b58base: + div, mod = divmod(long_value, __b58base) + result = __b58chars[mod] + result + long_value = div + result = __b58chars[long_value] + result + + # Bitcoin does a little leading-zero-compression: + # leading 0-bytes in the input become leading-1s + nPad = 0 + for c in v: + if c == '\0': + nPad += 1 + else: + break + + return (__b58chars[0] * nPad) + result + + +def hash_160(public_key): + md = hashlib.new('ripemd160') + md.update(hashlib.sha256(public_key).digest()) + return md.digest() + + +def hash_160_to_bc_address(h160): + vh160 = chr(addrtype) + h160 + h = Hash(vh160) + addr = vh160 + h[0:4] + return b58encode(addr) + + +def public_key_to_bc_address(public_key): + h160 = hash_160(public_key) + return hash_160_to_bc_address(h160) + + +def encode(val, base, minlen=0): + base, minlen = int(base), int(minlen) + code_string = ''.join([chr(x) for x in range(256)]) + result = "" + while val > 0: + result = code_string[val % base] + result + val //= base + return code_string[0] * max(minlen - len(result), 0) + result + + +def num_to_var_int(x): + x = int(x) + if x < 253: + return chr(x) + elif x < 65536: + return chr(253) + encode(x, 256, 2)[::-1] + elif x < 4294967296: + return chr(254) + encode(x, 256, 4)[::-1] + else: + return chr(255) + encode(x, 256, 8)[::-1] + + +def msg_magic(message): + return "\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + message + + +def get_address(eckey): + size = ssl.i2o_ECPublicKey(eckey, 0) + mb = ctypes.create_string_buffer(size) + ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb))) + return public_key_to_bc_address(mb.raw) + + +def Hash(data): + return hashlib.sha256(hashlib.sha256(data).digest()).digest() + + +def bx(bn, size=32): + b = ctypes.create_string_buffer(size) + ssl.BN_bn2bin(bn, b) + return b.raw.encode('hex') + + +def verify_message(address, signature, message): + pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1) + eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), signature) + addr = get_address(eckey) + return (address == addr) + + +def SetCompactSignature(pkey, hash, signature): + sig = base64.b64decode(signature) + if len(sig) != 65: + raise Exception("Wrong encoding") + nV = ord(sig[0]) + if nV < 27 or nV >= 35: + return False + if nV >= 31: + ssl.EC_KEY_set_conv_form(pkey, POINT_CONVERSION_COMPRESSED) + nV -= 4 + r = ssl.BN_bin2bn(sig[1:33], 32, None) + s = ssl.BN_bin2bn(sig[33:], 32, None) + eckey = ECDSA_SIG_recover_key_GFp(pkey, r, s, hash, len(hash), nV - 27, + False) + return eckey + + +def ECDSA_SIG_recover_key_GFp(eckey, r, s, msg, msglen, recid, check): + n = 0 + i = recid / 2 + ctx = R = O = Q = None + + try: + group = ssl.EC_KEY_get0_group(eckey) + ctx = ssl.BN_CTX_new() + ssl.BN_CTX_start(ctx) + order = ssl.BN_CTX_get(ctx) + ssl.EC_GROUP_get_order(group, order, ctx) + x = ssl.BN_CTX_get(ctx) + ssl.BN_copy(x, order) + ssl.BN_mul_word(x, i) + ssl.BN_add(x, x, r) 
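+        # Here x = r + i*order is the candidate X coordinate of the ephemeral
+        # point R (i = recid / 2 covers the corner case where r wrapped past
+        # the group order); recid % 2 below picks which Y root to use when
+        # decompressing R. The key is then recovered as Q = r^-1 * (s*R - e*G).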
+        field = ssl.BN_CTX_get(ctx)
+        ssl.EC_GROUP_get_curve_GFp(group, field, None, None, ctx)
+
+        if (ssl.BN_cmp(x, field) >= 0):
+            return False
+
+        R = ssl.EC_POINT_new(group)
+        ssl.EC_POINT_set_compressed_coordinates_GFp(group, R, x, recid % 2, ctx)
+
+        if check:
+            O = ssl.EC_POINT_new(group)
+            ssl.EC_POINT_mul(group, O, None, R, order, ctx)
+            if ssl.EC_POINT_is_at_infinity(group, O):
+                return False
+
+        Q = ssl.EC_POINT_new(group)
+        n = ssl.EC_GROUP_get_degree(group)
+        e = ssl.BN_CTX_get(ctx)
+        ssl.BN_bin2bn(msg, msglen, e)
+        if 8 * msglen > n:
+            ssl.BN_rshift(e, e, 8 - (n & 7))
+
+        zero = ssl.BN_CTX_get(ctx)
+        ssl.BN_set_word(zero, 0)
+        ssl.BN_mod_sub(e, zero, e, order, ctx)
+        rr = ssl.BN_CTX_get(ctx)
+        ssl.BN_mod_inverse(rr, r, order, ctx)
+        sor = ssl.BN_CTX_get(ctx)
+        ssl.BN_mod_mul(sor, s, rr, order, ctx)
+        eor = ssl.BN_CTX_get(ctx)
+        ssl.BN_mod_mul(eor, e, rr, order, ctx)
+        ssl.EC_POINT_mul(group, Q, eor, R, sor, ctx)
+        ssl.EC_KEY_set_public_key(eckey, Q)
+        return eckey
+    finally:
+        if ctx:
+            ssl.BN_CTX_free(ctx)
+        if R:
+            ssl.EC_POINT_free(R)
+        if O:
+            ssl.EC_POINT_free(O)
+        if Q:
+            ssl.EC_POINT_free(Q)
+
+
+def closeLibrary():
+    handle = ssl._lib._handle
+    if "FreeLibrary" in dir(_ctypes):
+        _ctypes.FreeLibrary(handle)
+        _ctypes.FreeLibrary(handle)
+        print "OpenSSL closed, handle:", handle
+    else:
+        _ctypes.dlclose(handle)
+        _ctypes.dlclose(handle)
+        print "OpenSSL dlclosed, handle:", handle
+
+
+def getMessagePubkey(message, sig):
+    pkey = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
+    if type(pkey) is not int and not pkey.value:
+        raise Exception(
+            "OpenSSL %s (%s) EC_KEY_new_by_curve_name failed: %s, probably your OpenSSL lib does not support secp256k1 elliptic curve. Please check: https://github.com/HelloZeroNet/ZeroNet/issues/132" %
+            (openssl_version, ssl._lib._name, pkey.value)
+        )
+    eckey = SetCompactSignature(pkey, Hash(msg_magic(message)), sig)
+    size = ssl.i2o_ECPublicKey(eckey, 0)
+    mb = ctypes.create_string_buffer(size)
+    ssl.i2o_ECPublicKey(eckey, ctypes.byref(ctypes.pointer(mb)))
+    pub = mb.raw
+    """
+    if time.time() - ssl.time_opened > 60 * 5:  # Reopen every 5 min
+        logging.debug("Reopening OpenSSL...")
+        closeLibrary()
+        openLibrary()
+    """
+    return pub
+
+
+def test():
+    sign = "HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ="
+    pubkey = "044827c756561b8ef6b28b5e53a000805adbf4938ab82e1c2b7f7ea16a0d6face9a509a0a13e794d742210b00581f3e249ebcc705240af2540ea19591091ac1d41"
+    assert getMessagePubkey("hello", sign).encode("hex") == pubkey
+
+test()  # Make sure it's working right
+
+if __name__ == "__main__":
+    import time
+    import os
+    import sys
+    sys.path.append("../pybitcointools")
+    import bitcoin as btctools
+    print "OpenSSL version %s" % openssl_version
+    print ssl._lib
+    priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
+    address = "1N2XWu5soeppX2qUjvrf81rpdbShKJrjTr"
+    sign = btctools.ecdsa_sign("hello", priv)  # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ=
+
+    s = time.time()
+    for i in range(1000):
+        pubkey = getMessagePubkey("hello", sign)
+        verified = btctools.pubkey_to_address(pubkey) == address
+    print "1000x Verified", verified, time.time() - s
diff --git a/src/lib/opensslVerify/ssleay32.dll b/src/lib/opensslVerify/ssleay32.dll
new file mode 100644
index 00000000..25218ac0
Binary files /dev/null and b/src/lib/opensslVerify/ssleay32.dll differ
diff --git a/src/lib/pyaes/README.md b/src/lib/pyaes/README.md
deleted file mode 100644
index 26e3b2ba..00000000
---
a/src/lib/pyaes/README.md +++ /dev/null @@ -1,363 +0,0 @@ -pyaes -===== - -A pure-Python implementation of the AES block cipher algorithm and the common modes of operation (CBC, CFB, CTR, ECB and OFB). - - -Features --------- - -* Supports all AES key sizes -* Supports all AES common modes -* Pure-Python (no external dependencies) -* BlockFeeder API allows streams to easily be encrypted and decrypted -* Python 2.x and 3.x support (make sure you pass in bytes(), not strings for Python 3) - - -API ---- - -All keys may be 128 bits (16 bytes), 192 bits (24 bytes) or 256 bits (32 bytes) long. - -To generate a random key use: -```python -import os - -# 128 bit, 192 bit and 256 bit keys -key_128 = os.urandom(16) -key_192 = os.urandom(24) -key_256 = os.urandom(32) -``` - -To generate keys from simple-to-remember passwords, consider using a _password-based key-derivation function_ such as [scrypt](https://github.com/ricmoo/pyscrypt). - - -### Common Modes of Operation - -There are many modes of operations, each with various pros and cons. In general though, the **CBC** and **CTR** modes are recommended. The **ECB is NOT recommended.**, and is included primarily for completeness. - -Each of the following examples assumes the following key: -```python -import pyaes - -# A 256 bit (32 byte) key -key = "This_key_for_demo_purposes_only!" - -# For some modes of operation we need a random initialization vector -# of 16 bytes -iv = "InitializationVe" -``` - - -#### Counter Mode of Operation (recommended) - -```python -aes = pyaes.AESModeOfOperationCTR(key) -plaintext = "Text may be any length you wish, no padding is required" -ciphertext = aes.encrypt(plaintext) - -# '''\xb6\x99\x10=\xa4\x96\x88\xd1\x89\x1co\xe6\x1d\xef;\x11\x03\xe3\xee -# \xa9V?wY\xbfe\xcdO\xe3\xdf\x9dV\x19\xe5\x8dk\x9fh\xb87>\xdb\xa3\xd6 -# \x86\xf4\xbd\xb0\x97\xf1\t\x02\xe9 \xed''' -print repr(ciphertext) - -# The counter mode of operation maintains state, so decryption requires -# a new instance be created -aes = pyaes.AESModeOfOperationCTR(key) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext - -# To use a custom initial value -counter = pyaes.Counter(initial_value = 100) -aes = pyaes.AESModeOfOperationCTR(key, counter = counter) -ciphertext = aes.encrypt(plaintext) - -# '''WZ\x844\x02\xbfoY\x1f\x12\xa6\xce\x03\x82Ei)\xf6\x97mX\x86\xe3\x9d -# _1\xdd\xbd\x87\xb5\xccEM_4\x01$\xa6\x81\x0b\xd5\x04\xd7Al\x07\xe5 -# \xb2\x0e\\\x0f\x00\x13,\x07''' -print repr(ciphertext) -``` - - -#### Cipher-Block Chaining (recommended) - -```python -aes = pyaes.AESModeOfOperationCBC(key, iv = iv) -plaintext = "TextMustBe16Byte" -ciphertext = aes.encrypt(plaintext) - -# '\xd6:\x18\xe6\xb1\xb3\xc3\xdc\x87\xdf\xa7|\x08{k\xb6' -print repr(ciphertext) - - -# The cipher-block chaining mode of operation maintains state, so -# decryption requires a new instance be created -aes = pyaes.AESModeOfOperationCBC(key, iv = iv) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -#### Cipher Feedback - -```python -# Each block into the mode of operation must be a multiple of the segment -# size. For this example we choose 8 bytes. 
-aes = pyaes.AESModeOfOperationCFB(key, iv = iv, segment_size = 8) -plaintext = "TextMustBeAMultipleOfSegmentSize" -ciphertext = aes.encrypt(plaintext) - -# '''v\xa9\xc1w"\x8aL\x93\xcb\xdf\xa0/\xf8Y\x0b\x8d\x88i\xcb\x85rmp -# \x85\xfe\xafM\x0c)\xd5\xeb\xaf''' -print repr(ciphertext) - - -# The cipher-block chaining mode of operation maintains state, so -# decryption requires a new instance be created -aes = pyaes.AESModeOfOperationCFB(key, iv = iv, segment_size = 8) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -#### Output Feedback Mode of Operation - -```python -aes = pyaes.AESModeOfOperationOFB(key, iv = iv) -plaintext = "Text may be any length you wish, no padding is required" -ciphertext = aes.encrypt(plaintext) - -# '''v\xa9\xc1wO\x92^\x9e\rR\x1e\xf7\xb1\xa2\x9d"l1\xc7\xe7\x9d\x87(\xc26s -# \xdd8\xc8@\xb6\xd9!\xf5\x0cM\xaa\x9b\xc4\xedLD\xe4\xb9\xd8\xdf\x9e\xac -# \xa1\xb8\xea\x0f\x8ev\xb5''' -print repr(ciphertext) - -# The counter mode of operation maintains state, so decryption requires -# a new instance be created -aes = pyaes.AESModeOfOperationOFB(key, iv = iv) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -#### Electronic Codebook (NOT recommended) - -```python -aes = pyaes.AESModeOfOperationECB(key) -plaintext = "TextMustBe16Byte" -ciphertext = aes.encrypt(plaintext) - -# 'L6\x95\x85\xe4\xd9\xf1\x8a\xfb\xe5\x94X\x80|\x19\xc3' -print repr(ciphertext) - -# Since there is no state stored in this mode of operation, it -# is not necessary to create a new aes object for decryption. -#aes = pyaes.AESModeOfOperationECB(key) -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext -``` - - -### BlockFeeder - -Since most of the modes of operations require data in specific block-sized or segment-sized blocks, it can be difficult when working with large arbitrary streams or strings of data. - -The BlockFeeder class is meant to make life easier for you, by buffering bytes across multiple calls and returning bytes as they are available, as well as padding or stripping the output when finished, if necessary. - -```python -import pyaes - -# Any mode of operation can be used; for this example CBC -key = "This_key_for_demo_purposes_only!" -iv = "InitializationVe" - -ciphertext = '' - -# We can encrypt one line at a time, regardles of length -encrypter = pyaes.Encrypter(pyaes.AESModeOfOperationCBC(key, iv)) -for line in file('/etc/passwd'): - ciphertext += encrypter.feed(line) - -# Make a final call to flush any remaining bytes and add paddin -ciphertext += encrypter.feed() - -# We can decrypt the cipher text in chunks (here we split it in half) -decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(key, iv)) -decrypted = decrypter.feed(ciphertext[:len(ciphertext) / 2]) -decrypted += decrypter.feed(ciphertext[len(ciphertext) / 2:]) - -# Again, make a final call to flush any remaining bytes and strip padding -decrypted += decrypter.feed() - -print file('/etc/passwd').read() == decrypted -``` - -### Stream Feeder - -This is meant to make it even easier to encrypt and decrypt streams and large files. - -```python -import pyaes - -# Any mode of operation can be used; for this example CTR -key = "This_key_for_demo_purposes_only!" 
- -# Create the mode of operation to encrypt with -mode = pyaes.AESModeOfOperationCTR(key) - -# The input and output files -file_in = file('/etc/passwd') -file_out = file('/tmp/encrypted.bin', 'wb') - -# Encrypt the data as a stream, the file is read in 8kb chunks, be default -pyaes.encrypt_stream(mode, file_in, file_out) - -# Close the files -file_in.close() -file_out.close() -``` - -Decrypting is identical, except you would use `pyaes.decrypt_stream`, and the encrypted file would be the `file_in` and target for decryption the `file_out`. - -### AES block cipher - -Generally you should use one of the modes of operation above. This may however be useful for experimenting with a custom mode of operation or dealing with encrypted blocks. - -The block cipher requires exactly one block of data to encrypt or decrypt, and each block should be an array with each element an integer representation of a byte. - -```python -import pyaes - -# 16 byte block of plain text -plaintext = "Hello World!!!!!" -plaintext_bytes = [ ord(c) for c in plaintext ] - -# 32 byte key (256 bit) -key = "This_key_for_demo_purposes_only!" - -# Our AES instance -aes = pyaes.AES(key) - -# Encrypt! -ciphertext = aes.encrypt(plaintext_bytes) - -# [55, 250, 182, 25, 185, 208, 186, 95, 206, 115, 50, 115, 108, 58, 174, 115] -print repr(ciphertext) - -# Decrypt! -decrypted = aes.decrypt(ciphertext) - -# True -print decrypted == plaintext_bytes -``` - -What is a key? --------------- - -This seems to be a point of confusion for many people new to using encryption. You can think of the key as the *"password"*. However, these algorithms require the *"password"* to be a specific length. - -With AES, there are three possible key lengths, 16-bytes, 24-bytes or 32-bytes. When you create an AES object, the key size is automatically detected, so it is important to pass in a key of the correct length. - -Often, you wish to provide a password of arbitrary length, for example, something easy to remember or write down. In these cases, you must come up with a way to transform the password into a key, of a specific length. A **Password-Based Key Derivation Function** (PBKDF) is an algorithm designed for this exact purpose. - -Here is an example, using the popular (possibly obsolete?) 
*crypt* PBKDF: - -``` -# See: https://www.dlitz.net/software/python-pbkdf2/ -import pbkdf2 - -password = "HelloWorld" - -# The crypt PBKDF returns a 48-byte string -key = pbkdf2.crypt(password) - -# A 16-byte, 24-byte and 32-byte key, respectively -key_16 = key[:16] -key_24 = key[:24] -key_32 = key[:32] -``` - -The [scrypt](https://github.com/ricmoo/pyscrypt) PBKDF is intentionally slow, to make it more difficult to brute-force guess a password: - -``` -# See: https://github.com/ricmoo/pyscrypt -import pyscrypt - -password = "HelloWorld" - -# Salt is required, and prevents Rainbow Table attacks -salt = "SeaSalt" - -# N, r, and p are parameters to specify how difficult it should be to -# generate a key; bigger numbers take longer and more memory -N = 1024 -r = 1 -p = 1 - -# A 16-byte, 24-byte and 32-byte key, respectively; the scrypt algorithm takes -# a 6-th parameter, indicating key length -key_16 = pyscrypt.hash(password, salt, N, r, p, 16) -key_24 = pyscrypt.hash(password, salt, N, r, p, 24) -key_32 = pyscrypt.hash(password, salt, N, r, p, 32) -``` - -Another possibility, is to use a hashing function, such as SHA256 to hash the password, but this method may be vulnerable to [Rainbow Attacks](http://en.wikipedia.org/wiki/Rainbow_table), unless you use a [salt](http://en.wikipedia.org/wiki/Salt_(cryptography)). - -```python -import hashlib - -password = "HelloWorld" - -# The SHA256 hash algorithm returns a 32-byte string -hashed = hashlib.sha256(password).digest() - -# A 16-byte, 24-byte and 32-byte key, respectively -key_16 = hashed[:16] -key_24 = hashed[:24] -key_32 = hashed -``` - - - - -Performance ------------ - -There is a test case provided in _/tests/test-aes.py_ which does some basic performance testing (its primary purpose is moreso as a regression test). - -Based on that test, in **CPython**, this library is about 30x slower than [PyCrypto](https://www.dlitz.net/software/pycrypto/) for CBC, ECB and OFB; about 80x slower for CFB; and 300x slower for CTR. - -Based on that same test, in **Pypy**, this library is about 4x slower than [PyCrypto](https://www.dlitz.net/software/pycrypto/) for CBC, ECB and OFB; about 12x slower for CFB; and 19x slower for CTR. - -The PyCrypto documentation makes reference to the counter call being responsible for the speed problems of the counter (CTR) mode of operation, which is why they use a specially optimized counter. I will investigate this problem further in the future. - - -FAQ ---- - -#### Why do this? - -The short answer, *why not?* - -The longer answer, is for my [pyscrypt](https://github.com/ricmoo/pyscrypt) library. I required a pure-Python AES implementation that supported 256-bit keys with the counter (CTR) mode of operation. After searching, I found several implementations, but all were missing CTR or only supported 128 bit keys. After all the work of learning AES inside and out to implement the library, it was only a marginal amount of extra work to library-ify a more general solution. So, *why not?* - -#### How do I get a question I have added? - -E-mail me at pyaes@ricmoo.com with any questions, suggestions, comments, et cetera. - - -#### Can I give you my money? - -Umm... Ok? 
:-) - -_Bitcoin_ - `18UDs4qV1shu2CgTS2tKojhCtM69kpnWg9` diff --git a/src/lib/pyaes/__init__.py b/src/lib/pyaes/__init__.py deleted file mode 100644 index 5712f794..00000000 --- a/src/lib/pyaes/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# This is a pure-Python implementation of the AES algorithm and AES common -# modes of operation. - -# See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard -# See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation - - -# Supported key sizes: -# 128-bit -# 192-bit -# 256-bit - - -# Supported modes of operation: -# ECB - Electronic Codebook -# CBC - Cipher-Block Chaining -# CFB - Cipher Feedback -# OFB - Output Feedback -# CTR - Counter - -# See the README.md for API details and general information. - -# Also useful, PyCrypto, a crypto library implemented in C with Python bindings: -# https://www.dlitz.net/software/pycrypto/ - - -VERSION = [1, 3, 0] - -from .aes import AES, AESModeOfOperationCTR, AESModeOfOperationCBC, AESModeOfOperationCFB, AESModeOfOperationECB, AESModeOfOperationOFB, AESModesOfOperation, Counter -from .blockfeeder import decrypt_stream, Decrypter, encrypt_stream, Encrypter -from .blockfeeder import PADDING_NONE, PADDING_DEFAULT diff --git a/src/lib/pyaes/aes.py b/src/lib/pyaes/aes.py deleted file mode 100644 index c6e8bc02..00000000 --- a/src/lib/pyaes/aes.py +++ /dev/null @@ -1,589 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# This is a pure-Python implementation of the AES algorithm and AES common -# modes of operation. - -# See: https://en.wikipedia.org/wiki/Advanced_Encryption_Standard - -# Honestly, the best description of the modes of operations are the wonderful -# diagrams on Wikipedia. They explain in moments what my words could never -# achieve. Hence the inline documentation here is sparer than I'd prefer. -# See: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation - -# Also useful, PyCrypto, a crypto library implemented in C with Python bindings: -# https://www.dlitz.net/software/pycrypto/ - - -# Supported key sizes: -# 128-bit -# 192-bit -# 256-bit - - -# Supported modes of operation: -# ECB - Electronic Codebook -# CBC - Cipher-Block Chaining -# CFB - Cipher Feedback -# OFB - Output Feedback -# CTR - Counter - - -# See the README.md for API details and general information. - - -import copy -import struct - -__all__ = ["AES", "AESModeOfOperationCTR", "AESModeOfOperationCBC", "AESModeOfOperationCFB", - "AESModeOfOperationECB", "AESModeOfOperationOFB", "AESModesOfOperation", "Counter"] - - -def _compact_word(word): - return (word[0] << 24) | (word[1] << 16) | (word[2] << 8) | word[3] - -def _string_to_bytes(text): - return list(ord(c) for c in text) - -def _bytes_to_string(binary): - return "".join(chr(b) for b in binary) - -def _concat_list(a, b): - return a + b - - -# Python 3 compatibility -try: - xrange -except Exception: - xrange = range - - # Python 3 supports bytes, which is already an array of integers - def _string_to_bytes(text): - if isinstance(text, bytes): - return text - return [ord(c) for c in text] - - # In Python 3, we return bytes - def _bytes_to_string(binary): - return bytes(binary) - - # Python 3 cannot concatenate a list onto a bytes, so we bytes-ify it first - def _concat_list(a, b): - return a + bytes(b) - - -# Based *largely* on the Rijndael implementation -# See: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf -class AES(object): - '''Encapsulates the AES block cipher. - - You generally should not need this. 
Use the AESModeOfOperation classes - below instead.''' - - # Number of rounds by keysize - number_of_rounds = {16: 10, 24: 12, 32: 14} - - # Round constant words - rcon = [ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a, 0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91 ] - - # S-box and Inverse S-box (S is for Substitution) - S = [ 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16 ] - Si =[ 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb, 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb, 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e, 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25, 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92, 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84, 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06, 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b, 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73, 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e, 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b, 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4, 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f, 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef, 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61, 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d ] - - # Transformations for encryption - T1 = [ 
0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554, 0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a, 0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b, 0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b, 0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f, 0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f, 0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5, 0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f, 0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb, 0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497, 0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed, 0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a, 0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594, 0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3, 0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504, 0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d, 0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739, 0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395, 0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883, 0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76, 0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4, 0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b, 0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0, 0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818, 0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651, 0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85, 0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12, 0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9, 0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7, 0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a, 0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8, 0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a ] - T2 = [ 0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5, 0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676, 0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0, 0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0, 0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 
0x4f83cccc, 0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515, 0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a, 0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575, 0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0, 0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484, 0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b, 0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf, 0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585, 0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8, 0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5, 0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2, 0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717, 0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373, 0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888, 0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb, 0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c, 0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979, 0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9, 0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808, 0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6, 0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a, 0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e, 0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e, 0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494, 0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf, 0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868, 0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616 ] - T3 = [ 0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5, 0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76, 0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0, 0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0, 0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc, 0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15, 0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a, 0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75, 0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0, 0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 
0x2f715e2f, 0x84971384, 0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b, 0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf, 0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185, 0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8, 0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5, 0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2, 0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17, 0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673, 0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88, 0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb, 0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c, 0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279, 0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9, 0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008, 0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6, 0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a, 0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e, 0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e, 0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394, 0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df, 0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068, 0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16 ] - T4 = [ 0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491, 0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec, 0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb, 0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b, 0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83, 0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a, 0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f, 0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea, 0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b, 0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713, 0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6, 0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85, 0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411, 0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b, 0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 
0x9d9dbc21, 0x38384870, 0xf5f504f1, 0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf, 0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e, 0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6, 0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b, 0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad, 0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8, 0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2, 0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049, 0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810, 0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197, 0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f, 0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c, 0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927, 0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733, 0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5, 0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0, 0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c ] - - # Transformations for decryption - T5 = [ 0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393, 0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f, 0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6, 0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844, 0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4, 0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94, 0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a, 0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c, 0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a, 0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051, 0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff, 0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb, 0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e, 0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a, 0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16, 0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8, 0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34, 0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120, 0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0, 0x2bb3166c, 
0xa970b999, 0x119448fa, 0x47e96422, 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef, 0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4, 0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5, 0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b, 0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6, 0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0, 0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f, 0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f, 0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713, 0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c, 0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86, 0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541, 0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742 ] - T6 = [ 0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303, 0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3, 0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9, 0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8, 0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a, 0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b, 0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab, 0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682, 0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe, 0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10, 0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015, 0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee, 0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72, 0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e, 0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a, 0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9, 0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e, 0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611, 0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3, 0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390, 0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf, 0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af, 0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb, 0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8, 
0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266, 0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6, 0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551, 0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647, 0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1, 0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db, 0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95, 0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857 ] - T7 = [ 0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3, 0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562, 0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3, 0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9, 0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce, 0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908, 0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655, 0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16, 0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6, 0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e, 0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050, 0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8, 0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a, 0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436, 0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12, 0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e, 0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb, 0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6, 0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1, 0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233, 0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad, 0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3, 0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b, 0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15, 0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2, 0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791, 0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665, 0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6, 0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 
0xb13c7a47, 0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844, 0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d, 0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8 ] - T8 = [ 0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b, 0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5, 0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b, 0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e, 0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d, 0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9, 0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66, 0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced, 0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4, 0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd, 0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60, 0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79, 0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c, 0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24, 0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c, 0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814, 0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b, 0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084, 0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077, 0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22, 0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f, 0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582, 0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb, 0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef, 0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035, 0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17, 0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46, 0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d, 0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a, 0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678, 0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff, 0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0 ] - - # Transformations for decryption key expansion - U1 = [ 0x00000000, 0x0e090d0b, 0x1c121a16, 0x121b171d, 0x3824342c, 0x362d3927, 0x24362e3a, 0x2a3f2331, 0x70486858, 
0x7e416553, 0x6c5a724e, 0x62537f45, 0x486c5c74, 0x4665517f, 0x547e4662, 0x5a774b69, 0xe090d0b0, 0xee99ddbb, 0xfc82caa6, 0xf28bc7ad, 0xd8b4e49c, 0xd6bde997, 0xc4a6fe8a, 0xcaaff381, 0x90d8b8e8, 0x9ed1b5e3, 0x8ccaa2fe, 0x82c3aff5, 0xa8fc8cc4, 0xa6f581cf, 0xb4ee96d2, 0xbae79bd9, 0xdb3bbb7b, 0xd532b670, 0xc729a16d, 0xc920ac66, 0xe31f8f57, 0xed16825c, 0xff0d9541, 0xf104984a, 0xab73d323, 0xa57ade28, 0xb761c935, 0xb968c43e, 0x9357e70f, 0x9d5eea04, 0x8f45fd19, 0x814cf012, 0x3bab6bcb, 0x35a266c0, 0x27b971dd, 0x29b07cd6, 0x038f5fe7, 0x0d8652ec, 0x1f9d45f1, 0x119448fa, 0x4be30393, 0x45ea0e98, 0x57f11985, 0x59f8148e, 0x73c737bf, 0x7dce3ab4, 0x6fd52da9, 0x61dc20a2, 0xad766df6, 0xa37f60fd, 0xb16477e0, 0xbf6d7aeb, 0x955259da, 0x9b5b54d1, 0x894043cc, 0x87494ec7, 0xdd3e05ae, 0xd33708a5, 0xc12c1fb8, 0xcf2512b3, 0xe51a3182, 0xeb133c89, 0xf9082b94, 0xf701269f, 0x4de6bd46, 0x43efb04d, 0x51f4a750, 0x5ffdaa5b, 0x75c2896a, 0x7bcb8461, 0x69d0937c, 0x67d99e77, 0x3daed51e, 0x33a7d815, 0x21bccf08, 0x2fb5c203, 0x058ae132, 0x0b83ec39, 0x1998fb24, 0x1791f62f, 0x764dd68d, 0x7844db86, 0x6a5fcc9b, 0x6456c190, 0x4e69e2a1, 0x4060efaa, 0x527bf8b7, 0x5c72f5bc, 0x0605bed5, 0x080cb3de, 0x1a17a4c3, 0x141ea9c8, 0x3e218af9, 0x302887f2, 0x223390ef, 0x2c3a9de4, 0x96dd063d, 0x98d40b36, 0x8acf1c2b, 0x84c61120, 0xaef93211, 0xa0f03f1a, 0xb2eb2807, 0xbce2250c, 0xe6956e65, 0xe89c636e, 0xfa877473, 0xf48e7978, 0xdeb15a49, 0xd0b85742, 0xc2a3405f, 0xccaa4d54, 0x41ecdaf7, 0x4fe5d7fc, 0x5dfec0e1, 0x53f7cdea, 0x79c8eedb, 0x77c1e3d0, 0x65daf4cd, 0x6bd3f9c6, 0x31a4b2af, 0x3fadbfa4, 0x2db6a8b9, 0x23bfa5b2, 0x09808683, 0x07898b88, 0x15929c95, 0x1b9b919e, 0xa17c0a47, 0xaf75074c, 0xbd6e1051, 0xb3671d5a, 0x99583e6b, 0x97513360, 0x854a247d, 0x8b432976, 0xd134621f, 0xdf3d6f14, 0xcd267809, 0xc32f7502, 0xe9105633, 0xe7195b38, 0xf5024c25, 0xfb0b412e, 0x9ad7618c, 0x94de6c87, 0x86c57b9a, 0x88cc7691, 0xa2f355a0, 0xacfa58ab, 0xbee14fb6, 0xb0e842bd, 0xea9f09d4, 0xe49604df, 0xf68d13c2, 0xf8841ec9, 0xd2bb3df8, 0xdcb230f3, 0xcea927ee, 0xc0a02ae5, 0x7a47b13c, 0x744ebc37, 0x6655ab2a, 0x685ca621, 0x42638510, 0x4c6a881b, 0x5e719f06, 0x5078920d, 0x0a0fd964, 0x0406d46f, 0x161dc372, 0x1814ce79, 0x322bed48, 0x3c22e043, 0x2e39f75e, 0x2030fa55, 0xec9ab701, 0xe293ba0a, 0xf088ad17, 0xfe81a01c, 0xd4be832d, 0xdab78e26, 0xc8ac993b, 0xc6a59430, 0x9cd2df59, 0x92dbd252, 0x80c0c54f, 0x8ec9c844, 0xa4f6eb75, 0xaaffe67e, 0xb8e4f163, 0xb6edfc68, 0x0c0a67b1, 0x02036aba, 0x10187da7, 0x1e1170ac, 0x342e539d, 0x3a275e96, 0x283c498b, 0x26354480, 0x7c420fe9, 0x724b02e2, 0x605015ff, 0x6e5918f4, 0x44663bc5, 0x4a6f36ce, 0x587421d3, 0x567d2cd8, 0x37a10c7a, 0x39a80171, 0x2bb3166c, 0x25ba1b67, 0x0f853856, 0x018c355d, 0x13972240, 0x1d9e2f4b, 0x47e96422, 0x49e06929, 0x5bfb7e34, 0x55f2733f, 0x7fcd500e, 0x71c45d05, 0x63df4a18, 0x6dd64713, 0xd731dcca, 0xd938d1c1, 0xcb23c6dc, 0xc52acbd7, 0xef15e8e6, 0xe11ce5ed, 0xf307f2f0, 0xfd0efffb, 0xa779b492, 0xa970b999, 0xbb6bae84, 0xb562a38f, 0x9f5d80be, 0x91548db5, 0x834f9aa8, 0x8d4697a3 ] - U2 = [ 0x00000000, 0x0b0e090d, 0x161c121a, 0x1d121b17, 0x2c382434, 0x27362d39, 0x3a24362e, 0x312a3f23, 0x58704868, 0x537e4165, 0x4e6c5a72, 0x4562537f, 0x74486c5c, 0x7f466551, 0x62547e46, 0x695a774b, 0xb0e090d0, 0xbbee99dd, 0xa6fc82ca, 0xadf28bc7, 0x9cd8b4e4, 0x97d6bde9, 0x8ac4a6fe, 0x81caaff3, 0xe890d8b8, 0xe39ed1b5, 0xfe8ccaa2, 0xf582c3af, 0xc4a8fc8c, 0xcfa6f581, 0xd2b4ee96, 0xd9bae79b, 0x7bdb3bbb, 0x70d532b6, 0x6dc729a1, 0x66c920ac, 0x57e31f8f, 0x5ced1682, 0x41ff0d95, 0x4af10498, 0x23ab73d3, 0x28a57ade, 0x35b761c9, 0x3eb968c4, 0x0f9357e7, 0x049d5eea, 0x198f45fd, 0x12814cf0, 
0xcb3bab6b, 0xc035a266, 0xdd27b971, 0xd629b07c, 0xe7038f5f, 0xec0d8652, 0xf11f9d45, 0xfa119448, 0x934be303, 0x9845ea0e, 0x8557f119, 0x8e59f814, 0xbf73c737, 0xb47dce3a, 0xa96fd52d, 0xa261dc20, 0xf6ad766d, 0xfda37f60, 0xe0b16477, 0xebbf6d7a, 0xda955259, 0xd19b5b54, 0xcc894043, 0xc787494e, 0xaedd3e05, 0xa5d33708, 0xb8c12c1f, 0xb3cf2512, 0x82e51a31, 0x89eb133c, 0x94f9082b, 0x9ff70126, 0x464de6bd, 0x4d43efb0, 0x5051f4a7, 0x5b5ffdaa, 0x6a75c289, 0x617bcb84, 0x7c69d093, 0x7767d99e, 0x1e3daed5, 0x1533a7d8, 0x0821bccf, 0x032fb5c2, 0x32058ae1, 0x390b83ec, 0x241998fb, 0x2f1791f6, 0x8d764dd6, 0x867844db, 0x9b6a5fcc, 0x906456c1, 0xa14e69e2, 0xaa4060ef, 0xb7527bf8, 0xbc5c72f5, 0xd50605be, 0xde080cb3, 0xc31a17a4, 0xc8141ea9, 0xf93e218a, 0xf2302887, 0xef223390, 0xe42c3a9d, 0x3d96dd06, 0x3698d40b, 0x2b8acf1c, 0x2084c611, 0x11aef932, 0x1aa0f03f, 0x07b2eb28, 0x0cbce225, 0x65e6956e, 0x6ee89c63, 0x73fa8774, 0x78f48e79, 0x49deb15a, 0x42d0b857, 0x5fc2a340, 0x54ccaa4d, 0xf741ecda, 0xfc4fe5d7, 0xe15dfec0, 0xea53f7cd, 0xdb79c8ee, 0xd077c1e3, 0xcd65daf4, 0xc66bd3f9, 0xaf31a4b2, 0xa43fadbf, 0xb92db6a8, 0xb223bfa5, 0x83098086, 0x8807898b, 0x9515929c, 0x9e1b9b91, 0x47a17c0a, 0x4caf7507, 0x51bd6e10, 0x5ab3671d, 0x6b99583e, 0x60975133, 0x7d854a24, 0x768b4329, 0x1fd13462, 0x14df3d6f, 0x09cd2678, 0x02c32f75, 0x33e91056, 0x38e7195b, 0x25f5024c, 0x2efb0b41, 0x8c9ad761, 0x8794de6c, 0x9a86c57b, 0x9188cc76, 0xa0a2f355, 0xabacfa58, 0xb6bee14f, 0xbdb0e842, 0xd4ea9f09, 0xdfe49604, 0xc2f68d13, 0xc9f8841e, 0xf8d2bb3d, 0xf3dcb230, 0xeecea927, 0xe5c0a02a, 0x3c7a47b1, 0x37744ebc, 0x2a6655ab, 0x21685ca6, 0x10426385, 0x1b4c6a88, 0x065e719f, 0x0d507892, 0x640a0fd9, 0x6f0406d4, 0x72161dc3, 0x791814ce, 0x48322bed, 0x433c22e0, 0x5e2e39f7, 0x552030fa, 0x01ec9ab7, 0x0ae293ba, 0x17f088ad, 0x1cfe81a0, 0x2dd4be83, 0x26dab78e, 0x3bc8ac99, 0x30c6a594, 0x599cd2df, 0x5292dbd2, 0x4f80c0c5, 0x448ec9c8, 0x75a4f6eb, 0x7eaaffe6, 0x63b8e4f1, 0x68b6edfc, 0xb10c0a67, 0xba02036a, 0xa710187d, 0xac1e1170, 0x9d342e53, 0x963a275e, 0x8b283c49, 0x80263544, 0xe97c420f, 0xe2724b02, 0xff605015, 0xf46e5918, 0xc544663b, 0xce4a6f36, 0xd3587421, 0xd8567d2c, 0x7a37a10c, 0x7139a801, 0x6c2bb316, 0x6725ba1b, 0x560f8538, 0x5d018c35, 0x40139722, 0x4b1d9e2f, 0x2247e964, 0x2949e069, 0x345bfb7e, 0x3f55f273, 0x0e7fcd50, 0x0571c45d, 0x1863df4a, 0x136dd647, 0xcad731dc, 0xc1d938d1, 0xdccb23c6, 0xd7c52acb, 0xe6ef15e8, 0xede11ce5, 0xf0f307f2, 0xfbfd0eff, 0x92a779b4, 0x99a970b9, 0x84bb6bae, 0x8fb562a3, 0xbe9f5d80, 0xb591548d, 0xa8834f9a, 0xa38d4697 ] - U3 = [ 0x00000000, 0x0d0b0e09, 0x1a161c12, 0x171d121b, 0x342c3824, 0x3927362d, 0x2e3a2436, 0x23312a3f, 0x68587048, 0x65537e41, 0x724e6c5a, 0x7f456253, 0x5c74486c, 0x517f4665, 0x4662547e, 0x4b695a77, 0xd0b0e090, 0xddbbee99, 0xcaa6fc82, 0xc7adf28b, 0xe49cd8b4, 0xe997d6bd, 0xfe8ac4a6, 0xf381caaf, 0xb8e890d8, 0xb5e39ed1, 0xa2fe8cca, 0xaff582c3, 0x8cc4a8fc, 0x81cfa6f5, 0x96d2b4ee, 0x9bd9bae7, 0xbb7bdb3b, 0xb670d532, 0xa16dc729, 0xac66c920, 0x8f57e31f, 0x825ced16, 0x9541ff0d, 0x984af104, 0xd323ab73, 0xde28a57a, 0xc935b761, 0xc43eb968, 0xe70f9357, 0xea049d5e, 0xfd198f45, 0xf012814c, 0x6bcb3bab, 0x66c035a2, 0x71dd27b9, 0x7cd629b0, 0x5fe7038f, 0x52ec0d86, 0x45f11f9d, 0x48fa1194, 0x03934be3, 0x0e9845ea, 0x198557f1, 0x148e59f8, 0x37bf73c7, 0x3ab47dce, 0x2da96fd5, 0x20a261dc, 0x6df6ad76, 0x60fda37f, 0x77e0b164, 0x7aebbf6d, 0x59da9552, 0x54d19b5b, 0x43cc8940, 0x4ec78749, 0x05aedd3e, 0x08a5d337, 0x1fb8c12c, 0x12b3cf25, 0x3182e51a, 0x3c89eb13, 0x2b94f908, 0x269ff701, 0xbd464de6, 0xb04d43ef, 0xa75051f4, 0xaa5b5ffd, 0x896a75c2, 0x84617bcb, 0x937c69d0, 
0x9e7767d9, 0xd51e3dae, 0xd81533a7, 0xcf0821bc, 0xc2032fb5, 0xe132058a, 0xec390b83, 0xfb241998, 0xf62f1791, 0xd68d764d, 0xdb867844, 0xcc9b6a5f, 0xc1906456, 0xe2a14e69, 0xefaa4060, 0xf8b7527b, 0xf5bc5c72, 0xbed50605, 0xb3de080c, 0xa4c31a17, 0xa9c8141e, 0x8af93e21, 0x87f23028, 0x90ef2233, 0x9de42c3a, 0x063d96dd, 0x0b3698d4, 0x1c2b8acf, 0x112084c6, 0x3211aef9, 0x3f1aa0f0, 0x2807b2eb, 0x250cbce2, 0x6e65e695, 0x636ee89c, 0x7473fa87, 0x7978f48e, 0x5a49deb1, 0x5742d0b8, 0x405fc2a3, 0x4d54ccaa, 0xdaf741ec, 0xd7fc4fe5, 0xc0e15dfe, 0xcdea53f7, 0xeedb79c8, 0xe3d077c1, 0xf4cd65da, 0xf9c66bd3, 0xb2af31a4, 0xbfa43fad, 0xa8b92db6, 0xa5b223bf, 0x86830980, 0x8b880789, 0x9c951592, 0x919e1b9b, 0x0a47a17c, 0x074caf75, 0x1051bd6e, 0x1d5ab367, 0x3e6b9958, 0x33609751, 0x247d854a, 0x29768b43, 0x621fd134, 0x6f14df3d, 0x7809cd26, 0x7502c32f, 0x5633e910, 0x5b38e719, 0x4c25f502, 0x412efb0b, 0x618c9ad7, 0x6c8794de, 0x7b9a86c5, 0x769188cc, 0x55a0a2f3, 0x58abacfa, 0x4fb6bee1, 0x42bdb0e8, 0x09d4ea9f, 0x04dfe496, 0x13c2f68d, 0x1ec9f884, 0x3df8d2bb, 0x30f3dcb2, 0x27eecea9, 0x2ae5c0a0, 0xb13c7a47, 0xbc37744e, 0xab2a6655, 0xa621685c, 0x85104263, 0x881b4c6a, 0x9f065e71, 0x920d5078, 0xd9640a0f, 0xd46f0406, 0xc372161d, 0xce791814, 0xed48322b, 0xe0433c22, 0xf75e2e39, 0xfa552030, 0xb701ec9a, 0xba0ae293, 0xad17f088, 0xa01cfe81, 0x832dd4be, 0x8e26dab7, 0x993bc8ac, 0x9430c6a5, 0xdf599cd2, 0xd25292db, 0xc54f80c0, 0xc8448ec9, 0xeb75a4f6, 0xe67eaaff, 0xf163b8e4, 0xfc68b6ed, 0x67b10c0a, 0x6aba0203, 0x7da71018, 0x70ac1e11, 0x539d342e, 0x5e963a27, 0x498b283c, 0x44802635, 0x0fe97c42, 0x02e2724b, 0x15ff6050, 0x18f46e59, 0x3bc54466, 0x36ce4a6f, 0x21d35874, 0x2cd8567d, 0x0c7a37a1, 0x017139a8, 0x166c2bb3, 0x1b6725ba, 0x38560f85, 0x355d018c, 0x22401397, 0x2f4b1d9e, 0x642247e9, 0x692949e0, 0x7e345bfb, 0x733f55f2, 0x500e7fcd, 0x5d0571c4, 0x4a1863df, 0x47136dd6, 0xdccad731, 0xd1c1d938, 0xc6dccb23, 0xcbd7c52a, 0xe8e6ef15, 0xe5ede11c, 0xf2f0f307, 0xfffbfd0e, 0xb492a779, 0xb999a970, 0xae84bb6b, 0xa38fb562, 0x80be9f5d, 0x8db59154, 0x9aa8834f, 0x97a38d46 ] - U4 = [ 0x00000000, 0x090d0b0e, 0x121a161c, 0x1b171d12, 0x24342c38, 0x2d392736, 0x362e3a24, 0x3f23312a, 0x48685870, 0x4165537e, 0x5a724e6c, 0x537f4562, 0x6c5c7448, 0x65517f46, 0x7e466254, 0x774b695a, 0x90d0b0e0, 0x99ddbbee, 0x82caa6fc, 0x8bc7adf2, 0xb4e49cd8, 0xbde997d6, 0xa6fe8ac4, 0xaff381ca, 0xd8b8e890, 0xd1b5e39e, 0xcaa2fe8c, 0xc3aff582, 0xfc8cc4a8, 0xf581cfa6, 0xee96d2b4, 0xe79bd9ba, 0x3bbb7bdb, 0x32b670d5, 0x29a16dc7, 0x20ac66c9, 0x1f8f57e3, 0x16825ced, 0x0d9541ff, 0x04984af1, 0x73d323ab, 0x7ade28a5, 0x61c935b7, 0x68c43eb9, 0x57e70f93, 0x5eea049d, 0x45fd198f, 0x4cf01281, 0xab6bcb3b, 0xa266c035, 0xb971dd27, 0xb07cd629, 0x8f5fe703, 0x8652ec0d, 0x9d45f11f, 0x9448fa11, 0xe303934b, 0xea0e9845, 0xf1198557, 0xf8148e59, 0xc737bf73, 0xce3ab47d, 0xd52da96f, 0xdc20a261, 0x766df6ad, 0x7f60fda3, 0x6477e0b1, 0x6d7aebbf, 0x5259da95, 0x5b54d19b, 0x4043cc89, 0x494ec787, 0x3e05aedd, 0x3708a5d3, 0x2c1fb8c1, 0x2512b3cf, 0x1a3182e5, 0x133c89eb, 0x082b94f9, 0x01269ff7, 0xe6bd464d, 0xefb04d43, 0xf4a75051, 0xfdaa5b5f, 0xc2896a75, 0xcb84617b, 0xd0937c69, 0xd99e7767, 0xaed51e3d, 0xa7d81533, 0xbccf0821, 0xb5c2032f, 0x8ae13205, 0x83ec390b, 0x98fb2419, 0x91f62f17, 0x4dd68d76, 0x44db8678, 0x5fcc9b6a, 0x56c19064, 0x69e2a14e, 0x60efaa40, 0x7bf8b752, 0x72f5bc5c, 0x05bed506, 0x0cb3de08, 0x17a4c31a, 0x1ea9c814, 0x218af93e, 0x2887f230, 0x3390ef22, 0x3a9de42c, 0xdd063d96, 0xd40b3698, 0xcf1c2b8a, 0xc6112084, 0xf93211ae, 0xf03f1aa0, 0xeb2807b2, 0xe2250cbc, 0x956e65e6, 0x9c636ee8, 0x877473fa, 0x8e7978f4, 0xb15a49de, 0xb85742d0, 
0xa3405fc2, 0xaa4d54cc, 0xecdaf741, 0xe5d7fc4f, 0xfec0e15d, 0xf7cdea53, 0xc8eedb79, 0xc1e3d077, 0xdaf4cd65, 0xd3f9c66b, 0xa4b2af31, 0xadbfa43f, 0xb6a8b92d, 0xbfa5b223, 0x80868309, 0x898b8807, 0x929c9515, 0x9b919e1b, 0x7c0a47a1, 0x75074caf, 0x6e1051bd, 0x671d5ab3, 0x583e6b99, 0x51336097, 0x4a247d85, 0x4329768b, 0x34621fd1, 0x3d6f14df, 0x267809cd, 0x2f7502c3, 0x105633e9, 0x195b38e7, 0x024c25f5, 0x0b412efb, 0xd7618c9a, 0xde6c8794, 0xc57b9a86, 0xcc769188, 0xf355a0a2, 0xfa58abac, 0xe14fb6be, 0xe842bdb0, 0x9f09d4ea, 0x9604dfe4, 0x8d13c2f6, 0x841ec9f8, 0xbb3df8d2, 0xb230f3dc, 0xa927eece, 0xa02ae5c0, 0x47b13c7a, 0x4ebc3774, 0x55ab2a66, 0x5ca62168, 0x63851042, 0x6a881b4c, 0x719f065e, 0x78920d50, 0x0fd9640a, 0x06d46f04, 0x1dc37216, 0x14ce7918, 0x2bed4832, 0x22e0433c, 0x39f75e2e, 0x30fa5520, 0x9ab701ec, 0x93ba0ae2, 0x88ad17f0, 0x81a01cfe, 0xbe832dd4, 0xb78e26da, 0xac993bc8, 0xa59430c6, 0xd2df599c, 0xdbd25292, 0xc0c54f80, 0xc9c8448e, 0xf6eb75a4, 0xffe67eaa, 0xe4f163b8, 0xedfc68b6, 0x0a67b10c, 0x036aba02, 0x187da710, 0x1170ac1e, 0x2e539d34, 0x275e963a, 0x3c498b28, 0x35448026, 0x420fe97c, 0x4b02e272, 0x5015ff60, 0x5918f46e, 0x663bc544, 0x6f36ce4a, 0x7421d358, 0x7d2cd856, 0xa10c7a37, 0xa8017139, 0xb3166c2b, 0xba1b6725, 0x8538560f, 0x8c355d01, 0x97224013, 0x9e2f4b1d, 0xe9642247, 0xe0692949, 0xfb7e345b, 0xf2733f55, 0xcd500e7f, 0xc45d0571, 0xdf4a1863, 0xd647136d, 0x31dccad7, 0x38d1c1d9, 0x23c6dccb, 0x2acbd7c5, 0x15e8e6ef, 0x1ce5ede1, 0x07f2f0f3, 0x0efffbfd, 0x79b492a7, 0x70b999a9, 0x6bae84bb, 0x62a38fb5, 0x5d80be9f, 0x548db591, 0x4f9aa883, 0x4697a38d ] - - def __init__(self, key): - - if len(key) not in (16, 24, 32): - raise ValueError('Invalid key size') - - rounds = self.number_of_rounds[len(key)] - - # Encryption round keys - self._Ke = [[0] * 4 for i in xrange(rounds + 1)] - - # Decryption round keys - self._Kd = [[0] * 4 for i in xrange(rounds + 1)] - - round_key_count = (rounds + 1) * 4 - KC = len(key) // 4 - - # Convert the key into ints - tk = [ struct.unpack('>i', key[i:i + 4])[0] for i in xrange(0, len(key), 4) ] - - # Copy values into round key arrays - for i in xrange(0, KC): - self._Ke[i // 4][i % 4] = tk[i] - self._Kd[rounds - (i // 4)][i % 4] = tk[i] - - # Key expansion (fips-197 section 5.2) - rconpointer = 0 - t = KC - while t < round_key_count: - - tt = tk[KC - 1] - tk[0] ^= ((self.S[(tt >> 16) & 0xFF] << 24) ^ - (self.S[(tt >> 8) & 0xFF] << 16) ^ - (self.S[ tt & 0xFF] << 8) ^ - self.S[(tt >> 24) & 0xFF] ^ - (self.rcon[rconpointer] << 24)) - rconpointer += 1 - - if KC != 8: - for i in xrange(1, KC): - tk[i] ^= tk[i - 1] - - # Key expansion for 256-bit keys is "slightly different" (fips-197) - else: - for i in xrange(1, KC // 2): - tk[i] ^= tk[i - 1] - tt = tk[KC // 2 - 1] - - tk[KC // 2] ^= (self.S[ tt & 0xFF] ^ - (self.S[(tt >> 8) & 0xFF] << 8) ^ - (self.S[(tt >> 16) & 0xFF] << 16) ^ - (self.S[(tt >> 24) & 0xFF] << 24)) - - for i in xrange(KC // 2 + 1, KC): - tk[i] ^= tk[i - 1] - - # Copy values into round key arrays - j = 0 - while j < KC and t < round_key_count: - self._Ke[t // 4][t % 4] = tk[j] - self._Kd[rounds - (t // 4)][t % 4] = tk[j] - j += 1 - t += 1 - - # Inverse-Cipher-ify the decryption round key (fips-197 section 5.3) - for r in xrange(1, rounds): - for j in xrange(0, 4): - tt = self._Kd[r][j] - self._Kd[r][j] = (self.U1[(tt >> 24) & 0xFF] ^ - self.U2[(tt >> 16) & 0xFF] ^ - self.U3[(tt >> 8) & 0xFF] ^ - self.U4[ tt & 0xFF]) - - def encrypt(self, plaintext): - 'Encrypt a block of plain text using the AES block cipher.' 
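The U1-U4 tables above are not arbitrary constants: entry x of U1 packs the InvMixColumns column (0e, 09, 0d, 0b) multiplied by x in GF(2^8), which is what lets the "Inverse-Cipher-ify" round-key step above run InvMixColumns as four table lookups. A minimal sketch that regenerates the first entries (gmul and u1_entry are illustrative helper names, not part of this module):

    def gmul(a, b):
        # Multiply in GF(2^8) modulo the AES polynomial x^8 + x^4 + x^3 + x + 1
        p = 0
        for _ in range(8):
            if b & 1:
                p ^= a
            carry = a & 0x80
            a = (a << 1) & 0xFF
            if carry:
                a ^= 0x1B
            b >>= 1
        return p

    def u1_entry(x):
        # One InvMixColumns column times x, packed big-endian
        return (gmul(0x0E, x) << 24) | (gmul(0x09, x) << 16) | (gmul(0x0D, x) << 8) | gmul(0x0B, x)

    assert u1_entry(0x01) == 0x0E090D0B  # matches U1[1] above
    assert u1_entry(0x02) == 0x1C121A16  # matches U1[2] above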
- - if len(plaintext) != 16: - raise ValueError('wrong block length') - - rounds = len(self._Ke) - 1 - (s1, s2, s3) = [1, 2, 3] - a = [0, 0, 0, 0] - - # Convert plaintext to (ints ^ key) - t = [(_compact_word(plaintext[4 * i:4 * i + 4]) ^ self._Ke[0][i]) for i in xrange(0, 4)] - - # Apply round transforms - for r in xrange(1, rounds): - for i in xrange(0, 4): - a[i] = (self.T1[(t[ i ] >> 24) & 0xFF] ^ - self.T2[(t[(i + s1) % 4] >> 16) & 0xFF] ^ - self.T3[(t[(i + s2) % 4] >> 8) & 0xFF] ^ - self.T4[ t[(i + s3) % 4] & 0xFF] ^ - self._Ke[r][i]) - t = copy.copy(a) - - # The last round is special - result = [ ] - for i in xrange(0, 4): - tt = self._Ke[rounds][i] - result.append((self.S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) - result.append((self.S[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) - result.append((self.S[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) - result.append((self.S[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) - - return result - - def decrypt(self, ciphertext): - 'Decrypt a block of cipher text using the AES block cipher.' - - if len(ciphertext) != 16: - raise ValueError('wrong block length') - - rounds = len(self._Kd) - 1 - (s1, s2, s3) = [3, 2, 1] - a = [0, 0, 0, 0] - - # Convert ciphertext to (ints ^ key) - t = [(_compact_word(ciphertext[4 * i:4 * i + 4]) ^ self._Kd[0][i]) for i in xrange(0, 4)] - - # Apply round transforms - for r in xrange(1, rounds): - for i in xrange(0, 4): - a[i] = (self.T5[(t[ i ] >> 24) & 0xFF] ^ - self.T6[(t[(i + s1) % 4] >> 16) & 0xFF] ^ - self.T7[(t[(i + s2) % 4] >> 8) & 0xFF] ^ - self.T8[ t[(i + s3) % 4] & 0xFF] ^ - self._Kd[r][i]) - t = copy.copy(a) - - # The last round is special - result = [ ] - for i in xrange(0, 4): - tt = self._Kd[rounds][i] - result.append((self.Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) - result.append((self.Si[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) - result.append((self.Si[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) - result.append((self.Si[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) - - return result - - -class Counter(object): - '''A counter object for the Counter (CTR) mode of operation. - - To create a custom counter, you can usually just override the - increment method.''' - - def __init__(self, initial_value = 1): - - # Convert the value into an array of bytes long - self._counter = [ ((initial_value >> i) % 256) for i in xrange(128 - 8, -1, -8) ] - - value = property(lambda s: s._counter) - - def increment(self): - '''Increment the counter (overflow rolls back to 0).''' - - for i in xrange(len(self._counter) - 1, -1, -1): - self._counter[i] += 1 - - if self._counter[i] < 256: break - - # Carry the one - self._counter[i] = 0 - - # Overflow - else: - self._counter = [ 0 ] * len(self._counter) - - -class AESBlockModeOfOperation(object): - '''Super-class for AES modes of operation that require blocks.''' - def __init__(self, key): - self._aes = AES(key) - - def decrypt(self, ciphertext): - raise Exception('not implemented') - - def encrypt(self, plaintext): - raise Exception('not implemented') - - -class AESStreamModeOfOperation(AESBlockModeOfOperation): - '''Super-class for AES modes of operation that are stream-ciphers.''' - -class AESSegmentModeOfOperation(AESStreamModeOfOperation): - '''Super-class for AES modes of operation that segment data.''' - - segment_bytes = 16 - - - -class AESModeOfOperationECB(AESBlockModeOfOperation): - '''AES Electronic Codebook Mode of Operation. 
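The Counter class just defined stores its 128-bit value big-endian as a list of 16 bytes and carries one byte at a time on increment; its overflow branch can be exercised directly. A quick sketch, using the class exactly as written above:

    c = Counter(initial_value = 2 ** 128 - 1)  # every byte starts at 0xFF
    c.increment()                              # the carry ripples through all 16 bytes
    assert c.value == [ 0 ] * 16               # ... and overflow rolls back to zero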
- - o Block-cipher, so data must be padded to 16 byte boundaries - - Security Notes: - o This mode is not recommended - o Any two identical blocks produce identical encrypted values, - exposing data patterns. (See the image of Tux on Wikipedia) - - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Electronic_codebook_.28ECB.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.1''' - - - name = "Electronic Codebook (ECB)" - - def encrypt(self, plaintext): - if len(plaintext) != 16: - raise ValueError('plaintext block must be 16 bytes') - - plaintext = _string_to_bytes(plaintext) - return _bytes_to_string(self._aes.encrypt(plaintext)) - - def decrypt(self, ciphertext): - if len(ciphertext) != 16: - raise ValueError('ciphertext block must be 16 bytes') - - ciphertext = _string_to_bytes(ciphertext) - return _bytes_to_string(self._aes.decrypt(ciphertext)) - - - -class AESModeOfOperationCBC(AESBlockModeOfOperation): - '''AES Cipher-Block Chaining Mode of Operation. - - o The Initialization Vector (IV) must be 16 bytes and should be - unpredictable - o Block-cipher, so data must be padded to 16 byte boundaries - o An incorrect initialization vector will only cause the first - block to be corrupt; all other blocks will be intact - o A corrupt bit in the cipher text will cause a block to be - corrupted, and the matching bit of the next block to be flipped, - but all other blocks will be intact. - - Security Notes: - o This mode (and CTR) is recommended. - - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher-block_chaining_.28CBC.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.2''' - - - name = "Cipher-Block Chaining (CBC)" - - def __init__(self, key, iv = None): - if iv is None: - self._last_cipherblock = [ 0 ] * 16 - elif len(iv) != 16: - raise ValueError('initialization vector must be 16 bytes') - else: - self._last_cipherblock = _string_to_bytes(iv) - - AESBlockModeOfOperation.__init__(self, key) - - def encrypt(self, plaintext): - if len(plaintext) != 16: - raise ValueError('plaintext block must be 16 bytes') - - plaintext = _string_to_bytes(plaintext) - precipherblock = [ (p ^ l) for (p, l) in zip(plaintext, self._last_cipherblock) ] - self._last_cipherblock = self._aes.encrypt(precipherblock) - - return _bytes_to_string(self._last_cipherblock) - - def decrypt(self, ciphertext): - if len(ciphertext) != 16: - raise ValueError('ciphertext block must be 16 bytes') - - cipherblock = _string_to_bytes(ciphertext) - plaintext = [ (p ^ l) for (p, l) in zip(self._aes.decrypt(cipherblock), self._last_cipherblock) ] - self._last_cipherblock = cipherblock - - return _bytes_to_string(plaintext) - - - -class AESModeOfOperationCFB(AESSegmentModeOfOperation): - '''AES Cipher Feedback Mode of Operation. 
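The ECB warning above (identical blocks encrypt identically) and the point of CBC's chaining are easy to demonstrate against these classes; a sketch, assuming the module's Python 2-era string conventions and a hypothetical key:

    key = '0123456789abcdef'       # 16 characters -> AES-128
    block = 'sixteen byte msg'     # exactly one block

    ecb = AESModeOfOperationECB(key)
    assert ecb.encrypt(block) == ecb.encrypt(block)  # ECB leaks the repetition

    cbc = AESModeOfOperationCBC(key, iv = '\x00' * 16)
    assert cbc.encrypt(block) != cbc.encrypt(block)  # chaining hides it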
- - o A stream-cipher, so input does not need to be padded to blocks, - but does need to be padded to segment_size - - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_feedback_.28CFB.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.3''' - - - name = "Cipher Feedback (CFB)" - - def __init__(self, key, iv, segment_size = 1): - if segment_size == 0: segment_size = 1 - - if iv is None: - self._shift_register = [ 0 ] * 16 - elif len(iv) != 16: - raise ValueError('initialization vector must be 16 bytes') - else: - self._shift_register = _string_to_bytes(iv) - - self._segment_bytes = segment_size - - AESBlockModeOfOperation.__init__(self, key) - - segment_bytes = property(lambda s: s._segment_bytes) - - def encrypt(self, plaintext): - if len(plaintext) % self._segment_bytes != 0: - raise ValueError('plaintext block must be a multiple of segment_size') - - plaintext = _string_to_bytes(plaintext) - - # Break block into segments - encrypted = [ ] - for i in xrange(0, len(plaintext), self._segment_bytes): - plaintext_segment = plaintext[i: i + self._segment_bytes] - xor_segment = self._aes.encrypt(self._shift_register)[:len(plaintext_segment)] - cipher_segment = [ (p ^ x) for (p, x) in zip(plaintext_segment, xor_segment) ] - - # Shift the oldest bytes out and the new ciphertext in - self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment) - - encrypted.extend(cipher_segment) - - return _bytes_to_string(encrypted) - - def decrypt(self, ciphertext): - if len(ciphertext) % self._segment_bytes != 0: - raise ValueError('ciphertext block must be a multiple of segment_size') - - ciphertext = _string_to_bytes(ciphertext) - - # Break block into segments - decrypted = [ ] - for i in xrange(0, len(ciphertext), self._segment_bytes): - cipher_segment = ciphertext[i: i + self._segment_bytes] - xor_segment = self._aes.encrypt(self._shift_register)[:len(cipher_segment)] - plaintext_segment = [ (p ^ x) for (p, x) in zip(cipher_segment, xor_segment) ] - - # Shift the oldest bytes out and the new ciphertext in - self._shift_register = _concat_list(self._shift_register[len(cipher_segment):], cipher_segment) - - decrypted.extend(plaintext_segment) - - return _bytes_to_string(decrypted) - - - -class AESModeOfOperationOFB(AESStreamModeOfOperation): - '''AES Output Feedback Mode of Operation. - - o A stream-cipher, so input does not need to be padded to blocks, - allowing arbitrary length data. - o A bit flipped in the cipher text flips the same bit in the - plain text, which can be useful for error correction techniques. 
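The bit-flipping property just described follows from the OFB keystream being independent of the ciphertext; a sketch (same hypothetical key as above):

    key = '0123456789abcdef'
    ct = AESModeOfOperationOFB(key, iv = '\x00' * 16).encrypt('attack at dawn')
    tampered = chr(ord(ct[0]) ^ 0x01) + ct[1:]   # flip one ciphertext bit
    pt = AESModeOfOperationOFB(key, iv = '\x00' * 16).decrypt(tampered)
    assert pt == chr(ord('a') ^ 0x01) + 'ttack at dawn'  # the same single bit flips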
- - Also see: - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Output_feedback_.28OFB.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.4''' - - - name = "Output Feedback (OFB)" - - def __init__(self, key, iv = None): - if iv is None: - self._last_precipherblock = [ 0 ] * 16 - elif len(iv) != 16: - raise ValueError('initialization vector must be 16 bytes') - else: - self._last_precipherblock = _string_to_bytes(iv) - - self._remaining_block = [ ] - - AESBlockModeOfOperation.__init__(self, key) - - def encrypt(self, plaintext): - encrypted = [ ] - for p in _string_to_bytes(plaintext): - if len(self._remaining_block) == 0: - self._remaining_block = self._aes.encrypt(self._last_precipherblock) - self._last_precipherblock = [ ] - precipherbyte = self._remaining_block.pop(0) - self._last_precipherblock.append(precipherbyte) - cipherbyte = p ^ precipherbyte - encrypted.append(cipherbyte) - - return _bytes_to_string(encrypted) - - def decrypt(self, ciphertext): - # AES-OFB is symmetric: decrypting is the same as encrypting - return self.encrypt(ciphertext) - - - -class AESModeOfOperationCTR(AESStreamModeOfOperation): - '''AES Counter Mode of Operation. - - o A stream-cipher, so input does not need to be padded to blocks, - allowing arbitrary length data. - o The counter must produce 16-byte values (the AES block size), - regardless of the key size - o Each block is independent of the others, so a corrupt byte will - not damage future blocks. - o Each block has a unique counter value associated with it, which - contributes to the encrypted value, so no data patterns are - leaked. - o Also known as: Counter Mode (CM), Integer Counter Mode (ICM) and - Segmented Integer Counter (SIC) - - Security Notes: - o This mode (and CBC) is recommended. - o Each message block is associated with a counter value which must be - unique for ALL messages with the same key. Otherwise security may be - compromised. 
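A round-trip sketch for the implementation that follows; note that decryption must restart from the same initial counter value, and that a key/counter pair must never repeat across messages:

    key = '0123456789abcdef'
    ct = AESModeOfOperationCTR(key, counter = Counter(initial_value = 7)).encrypt('any length input is fine')
    pt = AESModeOfOperationCTR(key, counter = Counter(initial_value = 7)).decrypt(ct)
    assert pt == 'any length input is fine'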
- - Also see: - - o https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Counter_.28CTR.29 - o See NIST SP800-38A (http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf); section 6.5 - and Appendix B for managing the initial counter''' - - - name = "Counter (CTR)" - - def __init__(self, key, counter = None): - AESBlockModeOfOperation.__init__(self, key) - - if counter is None: - counter = Counter() - - self._counter = counter - self._remaining_counter = [ ] - - def encrypt(self, plaintext): - while len(self._remaining_counter) < len(plaintext): - self._remaining_counter += self._aes.encrypt(self._counter.value) - self._counter.increment() - - plaintext = _string_to_bytes(plaintext) - - encrypted = [ (p ^ c) for (p, c) in zip(plaintext, self._remaining_counter) ] - self._remaining_counter = self._remaining_counter[len(encrypted):] - - return _bytes_to_string(encrypted) - - def decrypt(self, crypttext): - # AES-CTR is symmetric: decrypting is the same as encrypting - return self.encrypt(crypttext) - - -# Simple lookup table for each mode -AESModesOfOperation = dict( - ctr = AESModeOfOperationCTR, - cbc = AESModeOfOperationCBC, - cfb = AESModeOfOperationCFB, - ecb = AESModeOfOperationECB, - ofb = AESModeOfOperationOFB, -) diff --git a/src/lib/pyaes/blockfeeder.py b/src/lib/pyaes/blockfeeder.py deleted file mode 100644 index b9a904d2..00000000 --- a/src/lib/pyaes/blockfeeder.py +++ /dev/null @@ -1,227 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
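The blockfeeder module removed below layers chunking and PKCS#7 padding over the raw modes above, so callers need not align their data to block boundaries. A usage sketch of its encrypt_stream/decrypt_stream helpers (defined near the end of the module), with a hypothetical key and IV:

    from io import BytesIO

    key = b'0123456789abcdef'
    message = b'an arbitrary-length message; the feeder applies PKCS#7 padding'

    encrypted = BytesIO()
    encrypt_stream(AESModeOfOperationCBC(key, iv = b'\x00' * 16), BytesIO(message), encrypted)

    encrypted.seek(0)
    decrypted = BytesIO()
    decrypt_stream(AESModeOfOperationCBC(key, iv = b'\x00' * 16), encrypted, decrypted)
    assert decrypted.getvalue() == message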
- - -from .aes import AESBlockModeOfOperation, AESSegmentModeOfOperation, AESStreamModeOfOperation -from .util import append_PKCS7_padding, strip_PKCS7_padding, to_bufferable - - -# First we inject three functions to each of the modes of operation -# -# _can_consume(size) -# - Given a size, determine how many bytes could be consumed in -# a single call to either the decrypt or encrypt method -# -# _final_encrypt(data, padding = PADDING_DEFAULT) -# - call and return encrypt on this (last) chunk of data, -# padding as necessary; this will always be at least 16 -# bytes unless the total incoming input was less than 16 -# bytes -# -# _final_decrypt(data, padding = PADDING_DEFAULT) -# - same as _final_encrypt except for decrypt, for -# stripping off padding -# - -PADDING_NONE = 'none' -PADDING_DEFAULT = 'default' - -# @TODO: Ciphertext stealing and explicit PKCS#7 -# PADDING_CIPHERTEXT_STEALING -# PADDING_PKCS7 - -# ECB and CBC are block-only ciphers - -def _block_can_consume(self, size): - if size >= 16: return 16 - return 0 - -# After padding, we may have more than one block -def _block_final_encrypt(self, data, padding = PADDING_DEFAULT): - if padding == PADDING_DEFAULT: - data = append_PKCS7_padding(data) - - elif padding == PADDING_NONE: - if len(data) != 16: - raise Exception('invalid data length for final block') - else: - raise Exception('invalid padding option') - - if len(data) == 32: - return self.encrypt(data[:16]) + self.encrypt(data[16:]) - - return self.encrypt(data) - - -def _block_final_decrypt(self, data, padding = PADDING_DEFAULT): - if padding == PADDING_DEFAULT: - return strip_PKCS7_padding(self.decrypt(data)) - - if padding == PADDING_NONE: - if len(data) != 16: - raise Exception('invalid data length for final block') - return self.decrypt(data) - - raise Exception('invalid padding option') -AESBlockModeOfOperation._can_consume = _block_can_consume -AESBlockModeOfOperation._final_encrypt = _block_final_encrypt -AESBlockModeOfOperation._final_decrypt = _block_final_decrypt - - - -# CFB is a segment cipher - -def _segment_can_consume(self, size): - return self.segment_bytes * int(size // self.segment_bytes) - -# CFB can handle a non-segment-sized block at the end using the remaining cipherblock -def _segment_final_encrypt(self, data, padding = PADDING_DEFAULT): - if padding != PADDING_DEFAULT: - raise Exception('invalid padding option') - - faux_padding = (chr(0) * (self.segment_bytes - (len(data) % self.segment_bytes))) - padded = data + to_bufferable(faux_padding) - return self.encrypt(padded)[:len(data)] - -# CFB can handle a non-segment-sized block at the end using the remaining cipherblock -def _segment_final_decrypt(self, data, padding = PADDING_DEFAULT): - if padding != PADDING_DEFAULT: - raise Exception('invalid padding option') - - faux_padding = (chr(0) * (self.segment_bytes - (len(data) % self.segment_bytes))) - padded = data + to_bufferable(faux_padding) - return self.decrypt(padded)[:len(data)] - -AESSegmentModeOfOperation._can_consume = _segment_can_consume -AESSegmentModeOfOperation._final_encrypt = _segment_final_encrypt -AESSegmentModeOfOperation._final_decrypt = _segment_final_decrypt - - - -# OFB and CTR are stream ciphers - -def _stream_can_consume(self, size): - return size - -def _stream_final_encrypt(self, data, padding = PADDING_DEFAULT): - if padding not in [PADDING_NONE, PADDING_DEFAULT]: - raise Exception('invalid padding option') - - return self.encrypt(data) - -def _stream_final_decrypt(self, data, padding = PADDING_DEFAULT): - if padding not 
in [PADDING_NONE, PADDING_DEFAULT]: - raise Exception('invalid padding option') - - return self.decrypt(data) - -AESStreamModeOfOperation._can_consume = _stream_can_consume -AESStreamModeOfOperation._final_encrypt = _stream_final_encrypt -AESStreamModeOfOperation._final_decrypt = _stream_final_decrypt - - - -class BlockFeeder(object): - '''The super-class for objects to handle chunking a stream of bytes - into the appropriate block size for the underlying mode of operation - and applying (or stripping) padding, as necessary.''' - - def __init__(self, mode, feed, final, padding = PADDING_DEFAULT): - self._mode = mode - self._feed = feed - self._final = final - self._buffer = to_bufferable("") - self._padding = padding - - def feed(self, data = None): - '''Provide bytes to encrypt (or decrypt), returning any bytes - possible from this or any previous calls to feed. - - Call with None or an empty string to flush the mode of - operation and return any final bytes; no further calls to - feed may be made.''' - - if self._buffer is None: - raise ValueError('already finished feeder') - - # Finalize; process the spare bytes we were keeping - if data is None: - result = self._final(self._buffer, self._padding) - self._buffer = None - return result - - self._buffer += to_bufferable(data) - - # We keep 16 bytes around so we can determine padding - result = to_bufferable('') - while len(self._buffer) > 16: - can_consume = self._mode._can_consume(len(self._buffer) - 16) - if can_consume == 0: break - result += self._feed(self._buffer[:can_consume]) - self._buffer = self._buffer[can_consume:] - - return result - - -class Encrypter(BlockFeeder): - 'Accepts bytes of plaintext and returns encrypted ciphertext.' - - def __init__(self, mode, padding = PADDING_DEFAULT): - BlockFeeder.__init__(self, mode, mode.encrypt, mode._final_encrypt, padding) - - -class Decrypter(BlockFeeder): - 'Accepts bytes of ciphertext and returns decrypted plaintext.' - - def __init__(self, mode, padding = PADDING_DEFAULT): - BlockFeeder.__init__(self, mode, mode.decrypt, mode._final_decrypt, padding) - - -# 8kb blocks -BLOCK_SIZE = (1 << 13) - -def _feed_stream(feeder, in_stream, out_stream, block_size = BLOCK_SIZE): - 'Uses feeder to read and convert from in_stream and write to out_stream.' - - while True: - chunk = in_stream.read(block_size) - if not chunk: - break - converted = feeder.feed(chunk) - out_stream.write(converted) - converted = feeder.feed() - out_stream.write(converted) - - -def encrypt_stream(mode, in_stream, out_stream, block_size = BLOCK_SIZE, padding = PADDING_DEFAULT): - 'Encrypts a stream of bytes from in_stream to out_stream using mode.' - - encrypter = Encrypter(mode, padding = padding) - _feed_stream(encrypter, in_stream, out_stream, block_size) - - -def decrypt_stream(mode, in_stream, out_stream, block_size = BLOCK_SIZE, padding = PADDING_DEFAULT): - 'Decrypts a stream of bytes from in_stream to out_stream using mode.' 
- - decrypter = Decrypter(mode, padding = padding) - _feed_stream(decrypter, in_stream, out_stream, block_size) diff --git a/src/lib/pyaes/util.py b/src/lib/pyaes/util.py deleted file mode 100644 index 081a3759..00000000 --- a/src/lib/pyaes/util.py +++ /dev/null @@ -1,60 +0,0 @@ -# The MIT License (MIT) -# -# Copyright (c) 2014 Richard Moore -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -# Why to_bufferable? -# Python 3 is very different from Python 2.x when it comes to strings of text -# and strings of bytes; in Python 3, arbitrary binary data must be held in a -# "bytes" object rather than a str. This method ensures the object behaves as -# we need it to. - -def to_bufferable(binary): - return binary - -def _get_byte(c): - return ord(c) - -try: - xrange -except NameError: - - def to_bufferable(binary): - if isinstance(binary, bytes): - return binary - return bytes(ord(b) for b in binary) - - def _get_byte(c): - return c - -def append_PKCS7_padding(data): - pad = 16 - (len(data) % 16) - return data + to_bufferable(chr(pad) * pad) - -def strip_PKCS7_padding(data): - if len(data) % 16 != 0: - raise ValueError("invalid length") - - pad = _get_byte(data[-1]) - - # A valid PKCS#7 pad byte is 1..16; a pad of 0 would slice away everything - if pad < 1 or pad > 16: - raise ValueError("invalid padding byte") - - return data[:-pad] diff --git a/src/lib/pyasn1/LICENSE.rst b/src/lib/pyasn1/LICENSE.rst new file mode 100644 index 00000000..02b45c43 --- /dev/null +++ b/src/lib/pyasn1/LICENSE.rst @@ -0,0 +1,24 @@ +Copyright (c) 2005-2017, Ilya Etingof +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/src/lib/pyasn1/__init__.py b/src/lib/pyasn1/__init__.py new file mode 100644 index 00000000..091f6c3c --- /dev/null +++ b/src/lib/pyasn1/__init__.py @@ -0,0 +1,8 @@ +import sys + +# http://www.python.org/dev/peps/pep-0396/ +__version__ = '0.2.4' + +if sys.version_info[:2] < (2, 4): + raise RuntimeError('PyASN1 requires Python 2.4 or later') + diff --git a/src/lib/pyasn1/codec/__init__.py b/src/lib/pyasn1/codec/__init__.py new file mode 100644 index 00000000..8c3066b2 --- /dev/null +++ b/src/lib/pyasn1/codec/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/src/lib/pyasn1/codec/ber/__init__.py b/src/lib/pyasn1/codec/ber/__init__.py new file mode 100644 index 00000000..8c3066b2 --- /dev/null +++ b/src/lib/pyasn1/codec/ber/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/src/lib/pyasn1/codec/ber/decoder.py b/src/lib/pyasn1/codec/ber/decoder.py new file mode 100644 index 00000000..e100e750 --- /dev/null +++ b/src/lib/pyasn1/codec/ber/decoder.py @@ -0,0 +1,1048 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import base, tag, univ, char, useful, tagmap +from pyasn1.codec.ber import eoo +from pyasn1.compat.octets import oct2int, octs2ints, ints2octs, ensureString, null +from pyasn1.compat.integer import from_bytes +from pyasn1 import debug, error + +__all__ = ['decode'] + + +class AbstractDecoder(object): + protoComponent = None + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,)) + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,)) + + +class AbstractSimpleDecoder(AbstractDecoder): + tagFormats = (tag.tagFormatSimple,) + + @staticmethod + def substrateCollector(asn1Object, substrate, length): + return substrate[:length], substrate[length:] + + def _createComponent(self, asn1Spec, tagSet, value=None): + if tagSet[0].tagFormat not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) + if asn1Spec is None: + return self.protoComponent.clone(value, tagSet) + elif value is None: + return asn1Spec + else: + return asn1Spec.clone(value) + + +class AbstractConstructedDecoder(AbstractDecoder): + tagFormats = (tag.tagFormatConstructed,) + + # noinspection PyUnusedLocal + def _createComponent(self, asn1Spec, tagSet, value=None): + if tagSet[0].tagFormat not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) + if asn1Spec is None: + return self.protoComponent.clone(tagSet) + else: + return asn1Spec.clone() + + +class 
ExplicitTagDecoder(AbstractSimpleDecoder): + protoComponent = univ.Any('') + tagFormats = (tag.tagFormatConstructed,) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if substrateFun: + return substrateFun( + self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) + head, tail = substrate[:length], substrate[length:] + value, _ = decodeFun(head, asn1Spec, tagSet, length) + return value, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if substrateFun: + return substrateFun( + self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) + value, substrate = decodeFun(substrate, asn1Spec, tagSet, length) + terminator, substrate = decodeFun(substrate, allowEoo=True) + if terminator is eoo.endOfOctets: + return value, substrate + else: + raise error.PyAsn1Error('Missing end-of-octets terminator') + + +explicitTagDecoder = ExplicitTagDecoder() + + +class IntegerDecoder(AbstractSimpleDecoder): + protoComponent = univ.Integer(0) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + + if not head: + return self._createComponent(asn1Spec, tagSet, 0), tail + + value = from_bytes(head, signed=True) + + return self._createComponent(asn1Spec, tagSet, value), tail + + +class BooleanDecoder(IntegerDecoder): + protoComponent = univ.Boolean(0) + + def _createComponent(self, asn1Spec, tagSet, value=None): + return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0) + + +class BitStringDecoder(AbstractSimpleDecoder): + protoComponent = univ.BitString(()) + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + supportConstructedForm = True + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check? 
+ if not head: + raise error.PyAsn1Error('Empty substrate') + trailingBits = oct2int(head[0]) + if trailingBits > 7: + raise error.PyAsn1Error( + 'Trailing bits overflow %s' % trailingBits + ) + head = head[1:] + value = self.protoComponent.fromOctetString(head, trailingBits) + return self._createComponent(asn1Spec, tagSet, value), tail + + if not self.supportConstructedForm: + raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__) + + bitString = self._createComponent(asn1Spec, tagSet) + + if substrateFun: + return substrateFun(bitString, substrate, length) + + while head: + component, head = decodeFun(head, self.protoComponent) + bitString += component + + return bitString, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + bitString = self._createComponent(asn1Spec, tagSet) + + if substrateFun: + return substrateFun(bitString, substrate, length) + + while substrate: + component, substrate = decodeFun(substrate, self.protoComponent, allowEoo=True) + if component is eoo.endOfOctets: + break + + bitString += component + + else: + raise error.SubstrateUnderrunError('No EOO seen before substrate ends') + + return bitString, substrate + + +class OctetStringDecoder(AbstractSimpleDecoder): + protoComponent = univ.OctetString('') + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + supportConstructedForm = True + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + + if substrateFun: + return substrateFun(self._createComponent(asn1Spec, tagSet), + substrate, length) + + if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check? 
+ return self._createComponent(asn1Spec, tagSet, head), tail + + if not self.supportConstructedForm: + raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__) + + # All inner fragments are of the same type, treat them as octet string + substrateFun = self.substrateCollector + + header = null + + while head: + component, head = decodeFun(head, self.protoComponent, + substrateFun=substrateFun) + header += component + + return self._createComponent(asn1Spec, tagSet, header), tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if substrateFun and substrateFun is not self.substrateCollector: + asn1Object = self._createComponent(asn1Spec, tagSet) + return substrateFun(asn1Object, substrate, length) + + # All inner fragments are of the same type, treat them as octet string + substrateFun = self.substrateCollector + + header = null + + while substrate: + component, substrate = decodeFun(substrate, + self.protoComponent, + substrateFun=substrateFun, + allowEoo=True) + if component is eoo.endOfOctets: + break + header += component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + return self._createComponent(asn1Spec, tagSet, header), substrate + + +class NullDecoder(AbstractSimpleDecoder): + protoComponent = univ.Null('') + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + component = self._createComponent(asn1Spec, tagSet) + if head: + raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length) + return component, tail + + +class ObjectIdentifierDecoder(AbstractSimpleDecoder): + protoComponent = univ.ObjectIdentifier(()) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + raise error.PyAsn1Error('Empty substrate') + + head = octs2ints(head) + + oid = () + index = 0 + substrateLen = len(head) + while index < substrateLen: + subId = head[index] + index += 1 + if subId < 128: + oid = oid + (subId,) + elif subId > 128: + # Construct subid from a number of octets + nextSubId = subId + subId = 0 + while nextSubId >= 128: + subId = (subId << 7) + (nextSubId & 0x7F) + if index >= substrateLen: + raise error.SubstrateUnderrunError( + 'Short substrate for sub-OID past %s' % (oid,) + ) + nextSubId = head[index] + index += 1 + oid += ((subId << 7) + nextSubId,) + elif subId == 128: + # ASN.1 spec forbids leading zeros (0x80) in OID + # encoding, tolerating it opens a vulnerability. 
See + # http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf + # page 7 + raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding') + + # Decode two leading arcs + if 0 <= oid[0] <= 39: + oid = (0,) + oid + elif 40 <= oid[0] <= 79: + oid = (1, oid[0] - 40) + oid[1:] + elif oid[0] >= 80: + oid = (2, oid[0] - 80) + oid[1:] + else: + raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0]) + + return self._createComponent(asn1Spec, tagSet, oid), tail + + +class RealDecoder(AbstractSimpleDecoder): + protoComponent = univ.Real() + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + return self._createComponent(asn1Spec, tagSet, 0.0), tail + fo = oct2int(head[0]) + head = head[1:] + if fo & 0x80: # binary encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") + n = (fo & 0x03) + 1 + if n == 4: + n = oct2int(head[0]) + head = head[1:] + eo, head = head[:n], head[n:] + if not eo or not head: + raise error.PyAsn1Error('Real exponent screwed') + e = oct2int(eo[0]) & 0x80 and -1 or 0 + while eo: # exponent + e <<= 8 + e |= oct2int(eo[0]) + eo = eo[1:] + b = fo >> 4 & 0x03 # base bits + if b > 2: + raise error.PyAsn1Error('Illegal Real base') + if b == 1: # encbase = 8 + e *= 3 + elif b == 2: # encbase = 16 + e *= 4 + p = 0 + while head: # value + p <<= 8 + p |= oct2int(head[0]) + head = head[1:] + if fo & 0x40: # sign bit + p = -p + sf = fo >> 2 & 0x03 # scale bits + p *= 2 ** sf + value = (p, 2, e) + elif fo & 0x40: # infinite value + value = fo & 0x01 and '-inf' or 'inf' + elif fo & 0xc0 == 0: # character encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") + try: + if fo & 0x3 == 0x1: # NR1 + value = (int(head), 10, 0) + elif fo & 0x3 == 0x2: # NR2 + value = float(head) + elif fo & 0x3 == 0x3: # NR3 + value = float(head) + else: + raise error.SubstrateUnderrunError( + 'Unknown NR (tag %s)' % fo + ) + except ValueError: + raise error.SubstrateUnderrunError( + 'Bad character Real syntax' + ) + else: + raise error.SubstrateUnderrunError( + 'Unknown encoding (tag %s)' % fo + ) + return self._createComponent(asn1Spec, tagSet, value), tail + + +class SequenceAndSetDecoderBase(AbstractConstructedDecoder): + protoComponent = None + orderedComponents = False + + def _getComponentTagMap(self, asn1Object, idx): + raise NotImplementedError() + + def _getComponentPositionByType(self, asn1Object, tagSet, idx): + raise NotImplementedError() + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + asn1Object = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(asn1Object, substrate, length) + + namedTypes = asn1Object.getComponentType() + + if not self.orderedComponents or not namedTypes or namedTypes.hasOptionalOrDefault: + seenIndices = set() + idx = 0 + while head: + asn1Spec = self._getComponentTagMap(asn1Object, idx) + component, head = decodeFun(head, asn1Spec) + idx = self._getComponentPositionByType( + asn1Object, component.effectiveTagSet, idx + ) + + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + seenIndices.add(idx) + idx += 1 + + if namedTypes and not namedTypes.requiredComponents.issubset(seenIndices): + raise error.PyAsn1Error('ASN.1 object %s has uninitialized 
components' % asn1Object.__class__.__name__) + else: + for idx, asn1Spec in enumerate(namedTypes.values()): + component, head = decodeFun(head, asn1Spec) + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + + if not namedTypes: + asn1Object.verifySizeSpec() + + return asn1Object, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + asn1Object = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(asn1Object, substrate, length) + + namedTypes = asn1Object.getComponentType() + + if not namedTypes or namedTypes.hasOptionalOrDefault: + seenIndices = set() + idx = 0 + while substrate: + asn1Spec = self._getComponentTagMap(asn1Object, idx) + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if component is eoo.endOfOctets: + break + idx = self._getComponentPositionByType( + asn1Object, component.effectiveTagSet, idx + ) + + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + seenIndices.add(idx) + idx += 1 + + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + + if namedTypes and not namedTypes.requiredComponents.issubset(seenIndices): + raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__) + else: + for idx, asn1Spec in enumerate(namedTypes.values()): + component, substrate = decodeFun(substrate, asn1Spec) + + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + + component, substrate = decodeFun(substrate, eoo.endOfOctets, allowEoo=True) + if component is not eoo.endOfOctets: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + + if not namedTypes: + asn1Object.verifySizeSpec() + + return asn1Object, substrate + +class SequenceDecoder(SequenceAndSetDecoderBase): + protoComponent = univ.Sequence() + orderedComponents = True + + def _getComponentTagMap(self, asn1Object, idx): + try: + return asn1Object.getComponentTagMapNearPosition(idx) + except error.PyAsn1Error: + return + + def _getComponentPositionByType(self, asn1Object, tagSet, idx): + return asn1Object.getComponentPositionNearType(tagSet, idx) + + +class SequenceOfDecoder(AbstractConstructedDecoder): + protoComponent = univ.SequenceOf() + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + asn1Object = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(asn1Object, substrate, length) + asn1Spec = asn1Object.getComponentType() + idx = 0 + while head: + component, head = decodeFun(head, asn1Spec) + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + idx += 1 + asn1Object.verifySizeSpec() + return asn1Object, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + asn1Object = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(asn1Object, substrate, length) + asn1Spec = asn1Object.getComponentType() + idx = 0 + while substrate: + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if component is eoo.endOfOctets: + break + 
asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + idx += 1 + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + asn1Object.verifySizeSpec() + return asn1Object, substrate + + +class SetDecoder(SequenceAndSetDecoderBase): + protoComponent = univ.Set() + orderedComponents = False + + def _getComponentTagMap(self, asn1Object, idx): + return asn1Object.componentTagMap + + def _getComponentPositionByType(self, asn1Object, tagSet, idx): + nextIdx = asn1Object.getComponentPositionByType(tagSet) + if nextIdx is None: + return idx + else: + return nextIdx + + +class SetOfDecoder(SequenceOfDecoder): + protoComponent = univ.SetOf() + + +class ChoiceDecoder(AbstractConstructedDecoder): + protoComponent = univ.Choice() + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + asn1Object = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(asn1Object, substrate, length) + if asn1Object.tagSet == tagSet: # explicitly tagged Choice + component, head = decodeFun( + head, asn1Object.componentTagMap + ) + else: + component, head = decodeFun( + head, asn1Object.componentTagMap, tagSet, length, state + ) + effectiveTagSet = component.effectiveTagSet + asn1Object.setComponentByType( + effectiveTagSet, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False, + innerFlag=False + ) + return asn1Object, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + asn1Object = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(asn1Object, substrate, length) + if asn1Object.tagSet == tagSet: # explicitly tagged Choice + component, substrate = decodeFun(substrate, asn1Object.componentTagMap) + # eat up EOO marker + eooMarker, substrate = decodeFun(substrate, allowEoo=True) + if eooMarker is not eoo.endOfOctets: + raise error.PyAsn1Error('No EOO seen before substrate ends') + else: + component, substrate = decodeFun( + substrate, asn1Object.componentTagMap, tagSet, length, state + ) + effectiveTagSet = component.effectiveTagSet + asn1Object.setComponentByType( + effectiveTagSet, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False, + innerFlag=False + ) + return asn1Object, substrate + + +class AnyDecoder(AbstractSimpleDecoder): + protoComponent = univ.Any() + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if asn1Spec is None or asn1Spec is not None and tagSet != asn1Spec.tagSet: + # untagged Any container, recover inner header substrate + length += len(fullSubstrate) - len(substrate) + substrate = fullSubstrate + if substrateFun: + return substrateFun(self._createComponent(asn1Spec, tagSet), + substrate, length) + head, tail = substrate[:length], substrate[length:] + return self._createComponent(asn1Spec, tagSet, value=head), tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if asn1Spec is not None and tagSet == asn1Spec.tagSet: + # tagged Any type -- consume header substrate + header = null + else: + # untagged Any, recover header substrate + header = 
fullSubstrate[:-len(substrate)] + + # Any components do not inherit initial tag + asn1Spec = self.protoComponent + + if substrateFun and substrateFun is not self.substrateCollector: + asn1Object = self._createComponent(asn1Spec, tagSet) + return substrateFun(asn1Object, header + substrate, length + len(header)) + + # All inner fragments are of the same type, treat them as octet string + substrateFun = self.substrateCollector + + while substrate: + component, substrate = decodeFun(substrate, asn1Spec, + substrateFun=substrateFun, + allowEoo=True) + if component is eoo.endOfOctets: + break + header += component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + if substrateFun: + return header, substrate + else: + return self._createComponent(asn1Spec, tagSet, header), substrate + + +# character string types +class UTF8StringDecoder(OctetStringDecoder): + protoComponent = char.UTF8String() + + +class NumericStringDecoder(OctetStringDecoder): + protoComponent = char.NumericString() + + +class PrintableStringDecoder(OctetStringDecoder): + protoComponent = char.PrintableString() + + +class TeletexStringDecoder(OctetStringDecoder): + protoComponent = char.TeletexString() + + +class VideotexStringDecoder(OctetStringDecoder): + protoComponent = char.VideotexString() + + +class IA5StringDecoder(OctetStringDecoder): + protoComponent = char.IA5String() + + +class GraphicStringDecoder(OctetStringDecoder): + protoComponent = char.GraphicString() + + +class VisibleStringDecoder(OctetStringDecoder): + protoComponent = char.VisibleString() + + +class GeneralStringDecoder(OctetStringDecoder): + protoComponent = char.GeneralString() + + +class UniversalStringDecoder(OctetStringDecoder): + protoComponent = char.UniversalString() + + +class BMPStringDecoder(OctetStringDecoder): + protoComponent = char.BMPString() + + +# "useful" types +class ObjectDescriptorDecoder(OctetStringDecoder): + protoComponent = useful.ObjectDescriptor() + + +class GeneralizedTimeDecoder(OctetStringDecoder): + protoComponent = useful.GeneralizedTime() + + +class UTCTimeDecoder(OctetStringDecoder): + protoComponent = useful.UTCTime() + + +tagMap = { + univ.Integer.tagSet: IntegerDecoder(), + univ.Boolean.tagSet: BooleanDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Null.tagSet: NullDecoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(), + univ.Enumerated.tagSet: IntegerDecoder(), + univ.Real.tagSet: RealDecoder(), + univ.Sequence.tagSet: SequenceDecoder(), # conflicts with SequenceOf + univ.Set.tagSet: SetDecoder(), # conflicts with SetOf + univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any + # character string types + char.UTF8String.tagSet: UTF8StringDecoder(), + char.NumericString.tagSet: NumericStringDecoder(), + char.PrintableString.tagSet: PrintableStringDecoder(), + char.TeletexString.tagSet: TeletexStringDecoder(), + char.VideotexString.tagSet: VideotexStringDecoder(), + char.IA5String.tagSet: IA5StringDecoder(), + char.GraphicString.tagSet: GraphicStringDecoder(), + char.VisibleString.tagSet: VisibleStringDecoder(), + char.GeneralString.tagSet: GeneralStringDecoder(), + char.UniversalString.tagSet: UniversalStringDecoder(), + char.BMPString.tagSet: BMPStringDecoder(), + # useful types + useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(), + useful.UTCTime.tagSet: UTCTimeDecoder() +} + +# Type-to-codec map for ambiguous ASN.1 types +typeMap 
= { + univ.Set.typeId: SetDecoder(), + univ.SetOf.typeId: SetOfDecoder(), + univ.Sequence.typeId: SequenceDecoder(), + univ.SequenceOf.typeId: SequenceOfDecoder(), + univ.Choice.typeId: ChoiceDecoder(), + univ.Any.typeId: AnyDecoder() +} + +# Put in non-ambiguous types for faster codec lookup +for typeDecoder in tagMap.values(): + typeId = typeDecoder.protoComponent.__class__.typeId + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder + + +(stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec, + stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue, + stDumpRawValue, stErrorCondition, stStop) = [x for x in range(10)] + + +class Decoder(object): + defaultErrorState = stErrorCondition + # defaultErrorState = stDumpRawValue + defaultRawDecoder = AnyDecoder() + supportIndefLength = True + + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap + # Tag & TagSet objects caches + self.__tagCache = {} + self.__tagSetCache = {} + self.__eooSentinel = ints2octs((0, 0)) + + def __call__(self, substrate, asn1Spec=None, tagSet=None, + length=None, state=stDecodeTag, recursiveFlag=True, + substrateFun=None, allowEoo=False): + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) + + substrate = ensureString(substrate) + + # Look for end-of-octets sentinel + if allowEoo and self.supportIndefLength: + if substrate.startswith(self.__eooSentinel): + debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found') + return eoo.endOfOctets, substrate[2:] + + value = base.noValue + + fullSubstrate = substrate + while state != stStop: + if state == stDecodeTag: + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on tag decoding' + ) + # Decode tag + isShortTag = True + firstOctet = substrate[0] + substrate = substrate[1:] + try: + lastTag = self.__tagCache[firstOctet] + except KeyError: + integerTag = oct2int(firstOctet) + tagClass = integerTag & 0xC0 + tagFormat = integerTag & 0x20 + tagId = integerTag & 0x1F + if tagId == 0x1F: + isShortTag = False + lengthOctetIdx = 0 + tagId = 0 + try: + while True: + integerTag = oct2int(substrate[lengthOctetIdx]) + lengthOctetIdx += 1 + tagId <<= 7 + tagId |= (integerTag & 0x7F) + if not integerTag & 0x80: + break + substrate = substrate[lengthOctetIdx:] + except IndexError: + raise error.SubstrateUnderrunError( + 'Short octet stream on long tag decoding' + ) + lastTag = tag.Tag( + tagClass=tagClass, tagFormat=tagFormat, tagId=tagId + ) + if isShortTag: + # cache short tags + self.__tagCache[firstOctet] = lastTag + if tagSet is None: + if isShortTag: + try: + tagSet = self.__tagSetCache[firstOctet] + except KeyError: + # base tag not recovered + tagSet = tag.TagSet((), lastTag) + self.__tagSetCache[firstOctet] = tagSet + else: + tagSet = tag.TagSet((), lastTag) + else: + tagSet = lastTag + tagSet + state = stDecodeLength + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'tag decoded into %s, decoding length' % tagSet) + if state == stDecodeLength: + # Decode length + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on length decoding' + ) + firstOctet = oct2int(substrate[0]) + if firstOctet < 128: + size = 1 + length = firstOctet + elif firstOctet == 128: + size = 1 
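+ # length octet 0x80 with no further length octets selects the BER indefinite form: the value then runs until an end-of-octets (0x00 0x00) marker, hence the -1 flag below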
+ length = -1 + else: + size = firstOctet & 0x7F + # encoded in size bytes + encodedLength = octs2ints(substrate[1:size + 1]) + # no check on maximum length-of-length size here; that shouldn't be a + # problem, as we can handle more than is practically possible + if len(encodedLength) != size: + raise error.SubstrateUnderrunError( + '%s<%s at %s' % (size, len(encodedLength), tagSet) + ) + length = 0 + for lengthOctet in encodedLength: + length <<= 8 + length |= lengthOctet + size += 1 + substrate = substrate[size:] + if length == -1: + if not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + else: + if len(substrate) < length: + raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate))) + state = stGetValueDecoder + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])) + ) + if state == stGetValueDecoder: + if asn1Spec is None: + state = stGetValueDecoderByTag + else: + state = stGetValueDecoderByAsn1Spec + # + # There are two ways of creating subtypes in ASN.1, and they influence + # decoder operation differently: + # 1) either base types are used as-is, or no IMPLICIT tagging has been + # applied on subtyping; + # 2) subtype syntax drops base type information (by means of + # IMPLICIT tagging). + # The first case allows for complete tag recovery from the substrate, + # while the second one requires the original ASN.1 type spec for + # decoding. + # + # In either case a set of tags (tagSet) comes from the substrate + # in an incremental, tag-by-tag fashion (this is the case with + # EXPLICIT tags, which are the most basic). The outermost tag comes first + # from the wire. + # + if state == stGetValueDecoderByTag: + try: + concreteDecoder = self.__tagMap[tagSet] + except KeyError: + concreteDecoder = None + if concreteDecoder: + state = stDecodeValue + else: + try: + concreteDecoder = self.__tagMap[tagSet[:1]] + except KeyError: + concreteDecoder = None + if concreteDecoder: + state = stDecodeValue + else: + state = stTryAsExplicitTag + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as explicit tag')) + debug.scope.push( + concreteDecoder is None and '?' 
or concreteDecoder.protoComponent.__class__.__name__) + if state == stGetValueDecoderByAsn1Spec: + if asn1Spec.__class__ is dict or asn1Spec.__class__ is tagmap.TagMap: + try: + chosenSpec = asn1Spec[tagSet] + except KeyError: + chosenSpec = None + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('candidate ASN.1 spec is a map of:') + for firstOctet, v in asn1Spec.presentTypes.items(): + debug.logger(' %s -> %s' % (firstOctet, v.__class__.__name__)) + if asn1Spec.skipTypes: + debug.logger('but neither of: ') + for firstOctet, v in asn1Spec.skipTypes.items(): + debug.logger(' %s -> %s' % (firstOctet, v.__class__.__name__)) + debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '' or chosenSpec.prettyPrintType(), tagSet)) + else: + if tagSet == asn1Spec.tagSet or tagSet in asn1Spec.tagMap: + chosenSpec = asn1Spec + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) + else: + chosenSpec = None + + if chosenSpec is not None: + try: + # ambiguous type or just faster codec lookup + concreteDecoder = self.__typeMap[chosenSpec.typeId] + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,)) + except KeyError: + # use base type for codec lookup to recover untagged types + baseTagSet = tag.TagSet(chosenSpec.tagSet.baseTag, chosenSpec.tagSet.baseTag) + try: + # base type or tagged subtype + concreteDecoder = self.__tagMap[baseTagSet] + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'value decoder chosen by base %s' % (baseTagSet,)) + except KeyError: + concreteDecoder = None + if concreteDecoder: + asn1Spec = chosenSpec + state = stDecodeValue + else: + state = stTryAsExplicitTag + else: + concreteDecoder = None + state = stTryAsExplicitTag + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('codec %s chosen by ASN.1 spec, decoding %s' % (state == stDecodeValue and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as explicit tag')) + debug.scope.push(chosenSpec is None and '?' 
or chosenSpec.__class__.__name__) + if state == stTryAsExplicitTag: + if tagSet and tagSet[0].tagFormat == tag.tagFormatConstructed and tagSet[0].tagClass != tag.tagClassUniversal: + # Assume explicit tagging + concreteDecoder = explicitTagDecoder + state = stDecodeValue + else: + concreteDecoder = None + state = self.defaultErrorState + debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as failure')) + if state == stDumpRawValue: + concreteDecoder = self.defaultRawDecoder + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'codec %s chosen, decoding value' % concreteDecoder.__class__.__name__) + state = stDecodeValue + if state == stDecodeValue: + if not recursiveFlag and not substrateFun: # legacy + def substrateFun(a, b, c): + return a, b[:c] + if length == -1: # indef length + value, substrate = concreteDecoder.indefLenValueDecoder( + fullSubstrate, substrate, asn1Spec, tagSet, length, + stGetValueDecoder, self, substrateFun + ) + else: + value, substrate = concreteDecoder.valueDecoder( + fullSubstrate, substrate, asn1Spec, tagSet, length, + stGetValueDecoder, self, substrateFun + ) + state = stStop + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '')) + if state == stErrorCondition: + raise error.PyAsn1Error( + '%s not in asn1Spec: %s' % (tagSet, asn1Spec) + ) + if debug.logger and debug.logger & debug.flagDecoder: + debug.scope.pop() + debug.logger('decoder left scope %s, call completed' % debug.scope) + return value, substrate + + +#: Turns BER octet stream into an ASN.1 object. +#: +#: Takes a BER octet stream and decodes it into an ASN.1 object +#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: BER octet stream +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure +#: being decoded, *asn1Spec* may or may not be required. The most common reason for +#: it to be required is that the ASN.1 structure is encoded in *IMPLICIT* tagging mode. +#: +#: Returns +#: ------- +#: : :py:class:`tuple` +#: A tuple of pyasn1 object recovered from BER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: and the unprocessed trailing portion of the *substrate* (may be empty) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors +decode = Decoder(tagMap, typeMap) + +# XXX +# non-recursive decoding; return position rather than substrate diff --git a/src/lib/pyasn1/codec/ber/encoder.py b/src/lib/pyasn1/codec/ber/encoder.py new file mode 100644 index 00000000..2bf2bc7f --- /dev/null +++ b/src/lib/pyasn1/codec/ber/encoder.py @@ -0,0 +1,506 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import base, tag, univ, char, useful +from pyasn1.codec.ber import eoo +from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs +from pyasn1.compat.integer import to_bytes +from pyasn1 import debug, error + +__all__ = ['encode'] + + +class AbstractItemEncoder(object): + supportIndefLenMode = 1 + + # noinspection PyMethodMayBeStatic + def encodeTag(self, singleTag, isConstructed): + tagClass, tagFormat, tagId = singleTag + encodedTag = tagClass | tagFormat + if isConstructed: + encodedTag |= tag.tagFormatConstructed + if tagId < 31: + return (encodedTag | tagId,) + else: + substrate = (tagId & 0x7f,) + tagId >>= 7 + while tagId: + substrate = (0x80 | (tagId & 0x7f),) + substrate + tagId >>= 7 + return (encodedTag | 0x1F,) + substrate + + def encodeLength(self, length, defMode): + if not defMode and self.supportIndefLenMode: + return (0x80,) + if length < 0x80: + return (length,) + else: + substrate = () + while length: + substrate = (length & 0xff,) + substrate + length >>= 8 + substrateLen = len(substrate) + if substrateLen > 126: + raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen) + return (0x80 | substrateLen,) + substrate + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + raise error.PyAsn1Error('Not implemented') + + def _encodeEndOfOctets(self, encodeFun, defMode): + if defMode or not self.supportIndefLenMode: + return null + else: + return encodeFun(eoo.endOfOctets, defMode) + + def encode(self, encodeFun, value, defMode, maxChunkSize): + substrate, isConstructed, isOctets = self.encodeValue( + encodeFun, value, defMode, maxChunkSize + ) + tagSet = value.tagSet + # tagged value? 
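+ # BER frames every value as TLV: identifier (tag) octets, then length octets, then the content just built above; e.g. INTEGER 12 encodes as 02 01 0C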
+ if tagSet: + if not isConstructed: # primitive form implies definite mode + defMode = True + header = self.encodeTag(tagSet[-1], isConstructed) + header += self.encodeLength(len(substrate), defMode) + + if isOctets: + substrate = ints2octs(header) + substrate + else: + substrate = ints2octs(header + substrate) + + eoo = self._encodeEndOfOctets(encodeFun, defMode) + if eoo: + substrate += eoo + + return substrate + + +class EndOfOctetsEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return null, False, True + + +class ExplicitlyTaggedItemEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if isinstance(value, base.AbstractConstructedAsn1Item): + value = value.clone(tagSet=value.tagSet[:-1], cloneValueFlag=1) + else: + value = value.clone(tagSet=value.tagSet[:-1]) + return encodeFun(value, defMode, maxChunkSize), True, True + + +explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder() + + +class BooleanEncoder(AbstractItemEncoder): + supportIndefLenMode = False + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return value and (1,) or (0,), False, False + + +class IntegerEncoder(AbstractItemEncoder): + supportIndefLenMode = False + supportCompactZero = False + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if value == 0: + # de-facto way to encode zero + if self.supportCompactZero: + return (), False, False + else: + return (0,), False, False + + return to_bytes(int(value), signed=True), False, True + + +class BitStringEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + valueLength = len(value) + if valueLength % 8: + alignedValue = value << (8 - valueLength % 8) + else: + alignedValue = value + + if not maxChunkSize or len(alignedValue) <= maxChunkSize * 8: + substrate = alignedValue.asOctets() + return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True + + stop = 0 + substrate = null + while stop < valueLength: + start = stop + stop = min(start + maxChunkSize * 8, valueLength) + substrate += encodeFun(alignedValue[start:stop], defMode, maxChunkSize) + + return substrate, True, True + + +class OctetStringEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if not maxChunkSize or len(value) <= maxChunkSize: + return value.asOctets(), False, True + else: + pos = 0 + substrate = null + while True: + v = value.clone(value[pos:pos + maxChunkSize]) + if not v: + break + substrate += encodeFun(v, defMode, maxChunkSize) + pos += maxChunkSize + + return substrate, True, True + + +class NullEncoder(AbstractItemEncoder): + supportIndefLenMode = False + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return null, False, True + + +class ObjectIdentifierEncoder(AbstractItemEncoder): + supportIndefLenMode = False + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + oid = value.asTuple() + + # Build the first pair + try: + first = oid[0] + second = oid[1] + + except IndexError: + raise error.PyAsn1Error('Short OID %s' % (value,)) + + if 0 <= second <= 39: + if first == 1: + oid = (second + 40,) + oid[2:] + elif first == 0: + oid = (second,) + oid[2:] + elif first == 2: + oid = (second + 80,) + oid[2:] + else: + raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,)) + elif first == 2: + oid = (second + 80,) + oid[2:] + else: + raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,)) + + octets 
= () + + # Cycle through subIds + for subOid in oid: + if 0 <= subOid <= 127: + # Optimize for the common case + octets += (subOid,) + elif subOid > 127: + # Pack large Sub-Object IDs + res = (subOid & 0x7f,) + subOid >>= 7 + while subOid: + res = (0x80 | (subOid & 0x7f),) + res + subOid >>= 7 + # Add packed Sub-Object ID to the resulting Object ID + octets += res + else: + raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value)) + + return octets, False, False + + +class RealEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + binEncBase = 2 # set to None to choose encoding base automatically + + @staticmethod + def _dropFloatingPoint(m, encbase, e): + ms, es = 1, 1 + if m < 0: + ms = -1 # mantissa sign + if e < 0: + es = -1 # exponent sign + m *= ms + if encbase == 8: + m *= 2 ** (abs(e) % 3 * es) + e = abs(e) // 3 * es + elif encbase == 16: + m *= 2 ** (abs(e) % 4 * es) + e = abs(e) // 4 * es + + while True: + if int(m) != m: + m *= encbase + e -= 1 + continue + break + return ms, int(m), encbase, e + + def _chooseEncBase(self, value): + m, b, e = value + encBase = [2, 8, 16] + if value.binEncBase in encBase: + return self._dropFloatingPoint(m, value.binEncBase, e) + elif self.binEncBase in encBase: + return self._dropFloatingPoint(m, self.binEncBase, e) + # auto-choose base 2/8/16 + mantissa = [m, m, m] + exponenta = [e, e, e] + sign = 1 + encbase = 2 + e = float('inf') + for i in range(3): + (sign, + mantissa[i], + encBase[i], + exponenta[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponenta[i]) + if abs(exponenta[i]) < abs(e) or (abs(exponenta[i]) == abs(e) and mantissa[i] < m): + e = exponenta[i] + m = int(mantissa[i]) + encbase = encBase[i] + return sign, m, encbase, e + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if value.isPlusInfinity(): + return (0x40,), False, False + if value.isMinusInfinity(): + return (0x41,), False, False + m, b, e = value + if not m: + return null, False, True + if b == 10: + return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True + elif b == 2: + fo = 0x80 # binary encoding + ms, m, encbase, e = self._chooseEncBase(value) + if ms < 0: # mantissa sign + fo |= 0x40 # sign bit + # exponent & mantissa normalization + if encbase == 2: + while m & 0x1 == 0: + m >>= 1 + e += 1 + elif encbase == 8: + while m & 0x7 == 0: + m >>= 3 + e += 1 + fo |= 0x10 + else: # encbase = 16 + while m & 0xf == 0: + m >>= 4 + e += 1 + fo |= 0x20 + sf = 0 # scale factor + while m & 0x1 == 0: + m >>= 1 + sf += 1 + if sf > 3: + raise error.PyAsn1Error('Scale factor overflow') # bug if raised + fo |= sf << 2 + eo = null + if e == 0 or e == -1: + eo = int2oct(e & 0xff) + else: + while e not in (0, -1): + eo = int2oct(e & 0xff) + eo + e >>= 8 + if e == 0 and eo and oct2int(eo[0]) & 0x80: + eo = int2oct(0) + eo + if e == -1 and eo and not (oct2int(eo[0]) & 0x80): + eo = int2oct(0xff) + eo + n = len(eo) + if n > 0xff: + raise error.PyAsn1Error('Real exponent overflow') + if n == 1: + pass + elif n == 2: + fo |= 1 + elif n == 3: + fo |= 2 + else: + fo |= 3 + eo = int2oct(n & 0xff) + eo + po = null + while m: + po = int2oct(m & 0xff) + po + m >>= 8 + substrate = int2oct(fo) + eo + po + return substrate, False, True + else: + raise error.PyAsn1Error('Prohibited Real base %s' % b) + + +class SequenceEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + value.verifySizeSpec() + namedTypes = value.getComponentType() + substrate = null + idx = len(value) + while idx > 0: + idx -= 1 
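+ # walk the components right-to-left, prepending each encoding; OPTIONAL components without a value and DEFAULTed components equal to their default are skipped below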
+ if namedTypes: + if namedTypes[idx].isOptional and not value[idx].isValue: + continue + if namedTypes[idx].isDefaulted and value[idx] == namedTypes[idx].asn1Object: + continue + substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate + return substrate, True, True + + +class SequenceOfEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + value.verifySizeSpec() + substrate = null + idx = len(value) + while idx > 0: + idx -= 1 + substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate + return substrate, True, True + + +class ChoiceEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return encodeFun(value.getComponent(), defMode, maxChunkSize), True, True + + +class AnyEncoder(OctetStringEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return value.asOctets(), defMode == False, True + + +tagMap = { + eoo.endOfOctets.tagSet: EndOfOctetsEncoder(), + univ.Boolean.tagSet: BooleanEncoder(), + univ.Integer.tagSet: IntegerEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Null.tagSet: NullEncoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(), + univ.Enumerated.tagSet: IntegerEncoder(), + univ.Real.tagSet: RealEncoder(), + # Sequence & Set have same tags as SequenceOf & SetOf + univ.SequenceOf.tagSet: SequenceOfEncoder(), + univ.SetOf.tagSet: SequenceOfEncoder(), + univ.Choice.tagSet: ChoiceEncoder(), + # character string types + char.UTF8String.tagSet: OctetStringEncoder(), + char.NumericString.tagSet: OctetStringEncoder(), + char.PrintableString.tagSet: OctetStringEncoder(), + char.TeletexString.tagSet: OctetStringEncoder(), + char.VideotexString.tagSet: OctetStringEncoder(), + char.IA5String.tagSet: OctetStringEncoder(), + char.GraphicString.tagSet: OctetStringEncoder(), + char.VisibleString.tagSet: OctetStringEncoder(), + char.GeneralString.tagSet: OctetStringEncoder(), + char.UniversalString.tagSet: OctetStringEncoder(), + char.BMPString.tagSet: OctetStringEncoder(), + # useful types + useful.ObjectDescriptor.tagSet: OctetStringEncoder(), + useful.GeneralizedTime.tagSet: OctetStringEncoder(), + useful.UTCTime.tagSet: OctetStringEncoder() +} + +# Put in ambiguous & non-ambiguous types for faster codec lookup +typeMap = { + univ.Boolean.typeId: BooleanEncoder(), + univ.Integer.typeId: IntegerEncoder(), + univ.BitString.typeId: BitStringEncoder(), + univ.OctetString.typeId: OctetStringEncoder(), + univ.Null.typeId: NullEncoder(), + univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(), + univ.Enumerated.typeId: IntegerEncoder(), + univ.Real.typeId: RealEncoder(), + # Sequence & Set have same tags as SequenceOf & SetOf + univ.Set.typeId: SequenceEncoder(), + univ.SetOf.typeId: SequenceOfEncoder(), + univ.Sequence.typeId: SequenceEncoder(), + univ.SequenceOf.typeId: SequenceOfEncoder(), + univ.Choice.typeId: ChoiceEncoder(), + univ.Any.typeId: AnyEncoder(), + # character string types + char.UTF8String.typeId: OctetStringEncoder(), + char.NumericString.typeId: OctetStringEncoder(), + char.PrintableString.typeId: OctetStringEncoder(), + char.TeletexString.typeId: OctetStringEncoder(), + char.VideotexString.typeId: OctetStringEncoder(), + char.IA5String.typeId: OctetStringEncoder(), + char.GraphicString.typeId: OctetStringEncoder(), + char.VisibleString.typeId: OctetStringEncoder(), + char.GeneralString.typeId: OctetStringEncoder(), + char.UniversalString.typeId: 
OctetStringEncoder(), + char.BMPString.typeId: OctetStringEncoder(), + # useful types + useful.ObjectDescriptor.typeId: OctetStringEncoder(), + useful.GeneralizedTime.typeId: OctetStringEncoder(), + useful.UTCTime.typeId: OctetStringEncoder() +} + + +class Encoder(object): + supportIndefLength = True + + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap + + def __call__(self, value, defMode=True, maxChunkSize=0): + if not defMode and not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + debug.logger & debug.flagEncoder and debug.logger( + 'encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % ( + not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint())) + tagSet = value.tagSet + if len(tagSet) > 1: + concreteEncoder = explicitlyTaggedItemEncoder + else: + try: + concreteEncoder = self.__typeMap[value.typeId] + except KeyError: + # use base type for codec lookup to recover untagged types + baseTagSet = tag.TagSet(value.tagSet.baseTag, value.tagSet.baseTag) + try: + concreteEncoder = self.__tagMap[baseTagSet] + except KeyError: + raise error.PyAsn1Error('No encoder for %s' % (value,)) + debug.logger & debug.flagEncoder and debug.logger( + 'using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) + substrate = concreteEncoder.encode( + self, value, defMode, maxChunkSize + ) + debug.logger & debug.flagEncoder and debug.logger( + 'built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate))) + return substrate + +#: Turns ASN.1 object into BER octet stream. +#: +#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative), +#: walks all its components recursively and produces a BER octet stream. +#: +#: Parameters +#: ---------- +#: value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: A pyasn1 object to encode +#: +#: defMode: :py:class:`bool` +#: If `False`, produces indefinite length encoding +#: +#: maxChunkSize: :py:class:`int` +#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size) +#: +#: Returns +#: ------- +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: The given ASN.1 object encoded into a BER octet stream +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors +encode = Encoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/ber/eoo.py b/src/lib/pyasn1/codec/ber/eoo.py new file mode 100644 index 00000000..b02f5cc4 --- /dev/null +++ b/src/lib/pyasn1/codec/ber/eoo.py @@ -0,0 +1,25 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import base, tag + + +class EndOfOctets(base.AbstractSimpleAsn1Item): + defaultValue = 0 + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00) + ) + + _instance = None + + def __new__(cls, *args): + if cls._instance is None: + cls._instance = object.__new__(cls, *args) + + return cls._instance + + +endOfOctets = EndOfOctets() diff --git a/src/lib/pyasn1/codec/cer/__init__.py b/src/lib/pyasn1/codec/cer/__init__.py new file mode 100644 index 00000000..8c3066b2 --- /dev/null +++ b/src/lib/pyasn1/codec/cer/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. 
diff --git a/src/lib/pyasn1/codec/cer/decoder.py b/src/lib/pyasn1/codec/cer/decoder.py new file mode 100644 index 00000000..bf9cf4af --- /dev/null +++ b/src/lib/pyasn1/codec/cer/decoder.py @@ -0,0 +1,87 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import univ +from pyasn1.codec.ber import decoder +from pyasn1.compat.octets import oct2int +from pyasn1 import error + +__all__ = ['decode'] + + +class BooleanDecoder(decoder.AbstractSimpleDecoder): + protoComponent = univ.Boolean(0) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head or length != 1: + raise error.PyAsn1Error('Not single-octet Boolean payload') + byte = oct2int(head[0]) + # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while + # BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1 + # in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf + if byte == 0xff: + value = 1 + elif byte == 0x00: + value = 0 + else: + raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte) + return self._createComponent(asn1Spec, tagSet, value), tail + +# TODO: prohibit non-canonical encoding +BitStringDecoder = decoder.BitStringDecoder +OctetStringDecoder = decoder.OctetStringDecoder +RealDecoder = decoder.RealDecoder + +tagMap = decoder.tagMap.copy() +tagMap.update( + {univ.Boolean.tagSet: BooleanDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Real.tagSet: RealDecoder()} +) + +typeMap = decoder.typeMap.copy() + +# Put in non-ambiguous types for faster codec lookup +for typeDecoder in tagMap.values(): + typeId = typeDecoder.protoComponent.__class__.typeId + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder + + +class Decoder(decoder.Decoder): + pass + + +#: Turns CER octet stream into an ASN.1 object. +#: +#: Takes a CER octet stream and decodes it into an ASN.1 object +#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: CER octet stream +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure +#: being decoded, *asn1Spec* may or may not be required. The most common reason for +#: it to be required is that the ASN.1 structure is encoded in *IMPLICIT* tagging mode. +#: +#: Returns +#: ------- +#: : :py:class:`tuple` +#: A tuple of pyasn1 object recovered from CER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: and the unprocessed trailing portion of the *substrate* (may be empty) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors +decode = Decoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/cer/encoder.py b/src/lib/pyasn1/codec/cer/encoder.py new file mode 100644 index 00000000..e241e43d --- /dev/null +++ b/src/lib/pyasn1/codec/cer/encoder.py @@ -0,0 +1,179 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import univ +from pyasn1.type import useful +from pyasn1.codec.ber import encoder +from pyasn1.compat.octets import int2oct, str2octs, null +from pyasn1 import error + +__all__ = ['encode'] + + +class BooleanEncoder(encoder.IntegerEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + if client == 0: + substrate = (0,) + else: + substrate = (255,) + return substrate, False, False + + +class BitStringEncoder(encoder.BitStringEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + return encoder.BitStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + + +class OctetStringEncoder(encoder.OctetStringEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + + +class RealEncoder(encoder.RealEncoder): + def _chooseEncBase(self, value): + m, b, e = value + return self._dropFloatingPoint(m, b, e) + + +# a specialized GeneralStringEncoder could go here + +class GeneralizedTimeEncoder(OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + zero = str2octs('0') + + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() + # This breaks too many existing data items + # if '.' not in octets: + # raise error.PyAsn1Error('Format must include fraction of second: %r' % octets) + if len(octets) < 15: + raise error.PyAsn1Error('Bad GeneralizedTime length: %r' % octets) + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets[-1] != self.zchar[0]: + raise error.PyAsn1Error('Missing timezone specifier: %r' % octets) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + + +class UTCTimeEncoder(encoder.OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets and octets[-1] != self.zchar[0]: + client = client.clone(octets + self.zchar) + if len(client) != 13: + raise error.PyAsn1Error('Bad UTC time length: %r' % client) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + + +class SetOfEncoder(encoder.SequenceOfEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + client.verifySizeSpec() + substrate = null + idx = len(client) + # This is certainly a hack but how else do I distinguish SetOf + # from Set if they have the same tags & constraints? 
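+ # CER/DER canonical ordering (X.690): SET components are sorted by tag, SET OF components by their encoded octets; hence the two branches below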
+ if isinstance(client, univ.SequenceAndSetBase): + # Set + namedTypes = client.getComponentType() + comps = [] + while idx > 0: + idx -= 1 + if namedTypes[idx].isOptional and not client[idx].isValue: + continue + if namedTypes[idx].isDefaulted and client[idx] == namedTypes[idx].asn1Object: + continue + comps.append(client[idx]) + comps.sort(key=lambda x: isinstance(x, univ.Choice) and x.getMinTagSet() or x.tagSet) + for c in comps: + substrate += encodeFun(c, defMode, maxChunkSize) + else: + # SetOf + compSubs = [] + while idx > 0: + idx -= 1 + compSubs.append( + encodeFun(client[idx], defMode, maxChunkSize) + ) + compSubs.sort() # perhaps padding's not needed + substrate = null + for compSub in compSubs: + substrate += compSub + return substrate, True, True + + +tagMap = encoder.tagMap.copy() +tagMap.update({ + univ.Boolean.tagSet: BooleanEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Real.tagSet: RealEncoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(), + useful.UTCTime.tagSet: UTCTimeEncoder(), + univ.SetOf().tagSet: SetOfEncoder() # conflicts with Set +}) + +typeMap = encoder.typeMap.copy() +typeMap.update({ + univ.Boolean.typeId: BooleanEncoder(), + univ.BitString.typeId: BitStringEncoder(), + univ.OctetString.typeId: OctetStringEncoder(), + univ.Real.typeId: RealEncoder(), + useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(), + useful.UTCTime.typeId: UTCTimeEncoder(), + univ.Set.typeId: SetOfEncoder(), + univ.SetOf.typeId: SetOfEncoder() +}) + + +class Encoder(encoder.Encoder): + def __call__(self, client, defMode=False, maxChunkSize=0): + return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) + + +#: Turns ASN.1 object into CER octet stream. +#: +#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative), +#: walks all its components recursively and produces a CER octet stream. +#: +#: Parameters +#: ---------- +#: value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: A pyasn1 object to encode +#: +#: defMode: :py:class:`bool` +#: If `False`, produces indefinite length encoding +#: +#: maxChunkSize: :py:class:`int` +#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size) +#: +#: Returns +#: ------- +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: The given ASN.1 object encoded into a CER octet stream +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors +encode = Encoder(tagMap, typeMap) + +# EncoderFactory queries class instance and builds a map of tags -> encoders diff --git a/src/lib/pyasn1/codec/der/__init__.py b/src/lib/pyasn1/codec/der/__init__.py new file mode 100644 index 00000000..8c3066b2 --- /dev/null +++ b/src/lib/pyasn1/codec/der/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/src/lib/pyasn1/codec/der/decoder.py b/src/lib/pyasn1/codec/der/decoder.py new file mode 100644 index 00000000..24d3cbcb --- /dev/null +++ b/src/lib/pyasn1/codec/der/decoder.py @@ -0,0 +1,69 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import univ +from pyasn1.codec.cer import decoder + +__all__ = ['decode'] + + +class BitStringDecoder(decoder.BitStringDecoder): + supportConstructedForm = False + + +class OctetStringDecoder(decoder.OctetStringDecoder): + supportConstructedForm = False + +# TODO: prohibit non-canonical encoding +RealDecoder = decoder.RealDecoder + +tagMap = decoder.tagMap.copy() +tagMap.update( + {univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Real.tagSet: RealDecoder()} +) + +typeMap = decoder.typeMap.copy() + +# Put in non-ambiguous types for faster codec lookup +for typeDecoder in tagMap.values(): + typeId = typeDecoder.protoComponent.__class__.typeId + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder + + +class Decoder(decoder.Decoder): + supportIndefLength = False + + +#: Turns DER octet stream into an ASN.1 object. +#: +#: Takes a DER octet stream and decodes it into an ASN.1 object +#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: DER octet stream +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure +#: being decoded, *asn1Spec* may or may not be required. The most common reason for +#: it to be required is that the ASN.1 structure is encoded in *IMPLICIT* tagging mode. +#: +#: Returns +#: ------- +#: : :py:class:`tuple` +#: A tuple of pyasn1 object recovered from DER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: and the unprocessed trailing portion of the *substrate* (may be empty) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors +decode = Decoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/der/encoder.py b/src/lib/pyasn1/codec/der/encoder.py new file mode 100644 index 00000000..2d615e3f --- /dev/null +++ b/src/lib/pyasn1/codec/der/encoder.py @@ -0,0 +1,67 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import univ +from pyasn1.codec.cer import encoder +from pyasn1 import error + +__all__ = ['encode'] + + +class SetOfEncoder(encoder.SetOfEncoder): + @staticmethod + def _cmpSetComponents(c1, c2): + tagSet1 = isinstance(c1, univ.Choice) and c1.effectiveTagSet or c1.tagSet + tagSet2 = isinstance(c2, univ.Choice) and c2.effectiveTagSet or c2.tagSet + # emulate Python 2's three-way cmp(), which is gone in Python 3 + return (tagSet1 > tagSet2) - (tagSet1 < tagSet2) + + +tagMap = encoder.tagMap.copy() +tagMap.update({ + # Overload CER encoders with BER ones (a bit hackerish XXX) + univ.BitString.tagSet: encoder.encoder.BitStringEncoder(), + univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(), + # Set & SetOf have same tags + univ.SetOf().tagSet: SetOfEncoder() +}) + +typeMap = encoder.typeMap.copy() + + +class Encoder(encoder.Encoder): + supportIndefLength = False + + def __call__(self, client, defMode=True, maxChunkSize=0): + if not defMode or maxChunkSize: + raise error.PyAsn1Error('DER forbids indefinite length mode') + return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) + +#: Turns ASN.1 object into DER octet stream. +#: +#: Takes any ASN.1 object (e.g. 
:py:class:`~pyasn1.type.base.PyAsn1Item` derivative), +#: walks all its components recursively and produces a DER octet stream. +#: +#: Parameters +#: ---------- +#: value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: A pyasn1 object to encode +#: +#: defMode: :py:class:`bool` +#: If `False`, produces indefinite length encoding +#: +#: maxChunkSize: :py:class:`int` +#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size) +#: +#: Returns +#: ------- +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: The given ASN.1 object encoded into a DER octet stream +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors +encode = Encoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/native/__init__.py b/src/lib/pyasn1/codec/native/__init__.py new file mode 100644 index 00000000..8c3066b2 --- /dev/null +++ b/src/lib/pyasn1/codec/native/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/src/lib/pyasn1/codec/native/decoder.py b/src/lib/pyasn1/codec/native/decoder.py new file mode 100644 index 00000000..be75cb86 --- /dev/null +++ b/src/lib/pyasn1/codec/native/decoder.py @@ -0,0 +1,188 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import base, univ, char, useful, tag +from pyasn1 import debug, error + +__all__ = ['decode'] + + +class AbstractScalarDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc=None): + return asn1Spec.clone(pyObject) + + +class BitStringDecoder(AbstractScalarDecoder): + def __call__(self, pyObject, asn1Spec, decoderFunc=None): + return asn1Spec.clone(univ.BitString.fromBinaryString(pyObject)) + + +class SequenceOrSetDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc): + asn1Value = asn1Spec.clone() + + componentsTypes = asn1Spec.getComponentType() + + for field in asn1Value: + if field in pyObject: + asn1Value[field] = decoderFunc(pyObject[field], componentsTypes[field].asn1Object) + + return asn1Value + + +class SequenceOfOrSetOfDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc): + asn1Value = asn1Spec.clone() + + for pyValue in pyObject: + asn1Value.append(decoderFunc(pyValue, asn1Spec.getComponentType())) + + return asn1Value + + +class ChoiceDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc): + asn1Value = asn1Spec.clone() + + componentsTypes = asn1Spec.getComponentType() + + for field in pyObject: + if field in componentsTypes: + asn1Value[field] = decoderFunc(pyObject[field], componentsTypes[field].asn1Object) + break + + return asn1Value + + +tagMap = { + univ.Integer.tagSet: AbstractScalarDecoder(), + univ.Boolean.tagSet: AbstractScalarDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: AbstractScalarDecoder(), + univ.Null.tagSet: AbstractScalarDecoder(), + univ.ObjectIdentifier.tagSet: AbstractScalarDecoder(), + univ.Enumerated.tagSet: AbstractScalarDecoder(), + univ.Real.tagSet: AbstractScalarDecoder(), + univ.Sequence.tagSet: SequenceOrSetDecoder(), # conflicts with SequenceOf + univ.Set.tagSet: SequenceOrSetDecoder(), # conflicts with SetOf + univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any + # character string types + char.UTF8String.tagSet: AbstractScalarDecoder(), + char.NumericString.tagSet: AbstractScalarDecoder(), + char.PrintableString.tagSet: AbstractScalarDecoder(), 
char.TeletexString.tagSet: AbstractScalarDecoder(), + char.VideotexString.tagSet: AbstractScalarDecoder(), + char.IA5String.tagSet: AbstractScalarDecoder(), + char.GraphicString.tagSet: AbstractScalarDecoder(), + char.VisibleString.tagSet: AbstractScalarDecoder(), + char.GeneralString.tagSet: AbstractScalarDecoder(), + char.UniversalString.tagSet: AbstractScalarDecoder(), + char.BMPString.tagSet: AbstractScalarDecoder(), + # useful types + useful.ObjectDescriptor.tagSet: AbstractScalarDecoder(), + useful.GeneralizedTime.tagSet: AbstractScalarDecoder(), + useful.UTCTime.tagSet: AbstractScalarDecoder() +} + +# Put in ambiguous & non-ambiguous types for faster codec lookup +typeMap = { + univ.Integer.typeId: AbstractScalarDecoder(), + univ.Boolean.typeId: AbstractScalarDecoder(), + univ.BitString.typeId: BitStringDecoder(), + univ.OctetString.typeId: AbstractScalarDecoder(), + univ.Null.typeId: AbstractScalarDecoder(), + univ.ObjectIdentifier.typeId: AbstractScalarDecoder(), + univ.Enumerated.typeId: AbstractScalarDecoder(), + univ.Real.typeId: AbstractScalarDecoder(), + # ambiguous base types + univ.Set.typeId: SequenceOrSetDecoder(), + univ.SetOf.typeId: SequenceOfOrSetOfDecoder(), + univ.Sequence.typeId: SequenceOrSetDecoder(), + univ.SequenceOf.typeId: SequenceOfOrSetOfDecoder(), + univ.Choice.typeId: ChoiceDecoder(), + univ.Any.typeId: AbstractScalarDecoder(), + # character string types + char.UTF8String.typeId: AbstractScalarDecoder(), + char.NumericString.typeId: AbstractScalarDecoder(), + char.PrintableString.typeId: AbstractScalarDecoder(), + char.TeletexString.typeId: AbstractScalarDecoder(), + char.VideotexString.typeId: AbstractScalarDecoder(), + char.IA5String.typeId: AbstractScalarDecoder(), + char.GraphicString.typeId: AbstractScalarDecoder(), + char.VisibleString.typeId: AbstractScalarDecoder(), + char.GeneralString.typeId: AbstractScalarDecoder(), + char.UniversalString.typeId: AbstractScalarDecoder(), + char.BMPString.typeId: AbstractScalarDecoder(), + # useful types + useful.ObjectDescriptor.typeId: AbstractScalarDecoder(), + useful.GeneralizedTime.typeId: AbstractScalarDecoder(), + useful.UTCTime.typeId: AbstractScalarDecoder() +} + + +class Decoder(object): + + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap): + self.__tagMap = tagMap + self.__typeMap = typeMap + + def __call__(self, pyObject, asn1Spec): + if debug.logger & debug.flagDecoder: + debug.scope.push(type(pyObject).__name__) + debug.logger('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__)) + + if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item): + raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__) + + try: + valueDecoder = self.__typeMap[asn1Spec.typeId] + except KeyError: + # use base type for codec lookup to recover untagged types + baseTagSet = tag.TagSet(asn1Spec.tagSet.baseTag, asn1Spec.tagSet.baseTag) + try: + valueDecoder = self.__tagMap[baseTagSet] + except KeyError: + raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet) + + if debug.logger & debug.flagDecoder: + debug.logger('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject))) + + value = valueDecoder(pyObject, asn1Spec, self) + + if debug.logger & debug.flagDecoder: + debug.logger('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value))) + debug.scope.pop() + + return value + 
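+ +# Illustrative usage sketch ('Record' is a hypothetical Sequence spec with +# a single Integer component named 'id'; it is not defined in this module): +# +# from pyasn1.codec.native import decoder +# record = decoder.decode({'id': 12}, asn1Spec=Record()) +# assert int(record['id']) == 12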
+ +#: Turns Python objects of built-in types into ASN.1 objects. +#: +#: Takes Python objects of built-in types and turns them into a tree of +#: ASN.1 objects (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: pyObject: :py:class:`object` +#: A scalar or nested Python objects +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. It is required +#: for successful interpretation of Python objects mapping into their ASN.1 +#: representations. +#: +#: Returns +#: ------- +#: : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A scalar or constructed pyasn1 object +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors +decode = Decoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/native/encoder.py b/src/lib/pyasn1/codec/native/encoder.py new file mode 100644 index 00000000..afeb8ae0 --- /dev/null +++ b/src/lib/pyasn1/codec/native/encoder.py @@ -0,0 +1,215 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +try: + from collections import OrderedDict + +except ImportError: + OrderedDict = dict + +from pyasn1.type import base, univ, char, useful +from pyasn1 import debug, error + +__all__ = ['encode'] + + +class AbstractItemEncoder(object): + def encode(self, encodeFun, value): + raise error.PyAsn1Error('Not implemented') + + +class ExplicitlyTaggedItemEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + if isinstance(value, base.AbstractConstructedAsn1Item): + value = value.clone(tagSet=value.tagSet[:-1], + cloneValueFlag=1) + else: + value = value.clone(tagSet=value.tagSet[:-1]) + return encodeFun(value) + +explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder() + + +class BooleanEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return bool(value) + + +class IntegerEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return int(value) + + +class BitStringEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return str(value) + + +class OctetStringEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return value.asOctets() + + +class TextStringEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return value.prettyPrint() + + +class NullEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return None + + +class ObjectIdentifierEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return str(value) + + +class RealEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return float(value) + + +class SetEncoder(AbstractItemEncoder): + protoDict = dict + def encode(self, encodeFun, value): + value.verifySizeSpec() + namedTypes = value.getComponentType() + substrate = self.protoDict() + for idx, (key, subValue) in enumerate(value.items()): + if namedTypes[idx].isOptional and not value[idx].isValue: + continue + substrate[key] = encodeFun(subValue) + return substrate + + +class SequenceEncoder(SetEncoder): + protoDict = OrderedDict + + +class SequenceOfEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + value.verifySizeSpec() + return [encodeFun(x) for x in value] + + +class ChoiceEncoder(SequenceEncoder): + pass + + +class AnyEncoder(AbstractItemEncoder): + def encode(self, 
encodeFun, value): + return value.asOctets() + + +tagMap = { + univ.Boolean.tagSet: BooleanEncoder(), + univ.Integer.tagSet: IntegerEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Null.tagSet: NullEncoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(), + univ.Enumerated.tagSet: IntegerEncoder(), + univ.Real.tagSet: RealEncoder(), + # Sequence & Set have same tags as SequenceOf & SetOf + univ.SequenceOf.tagSet: SequenceOfEncoder(), + univ.SetOf.tagSet: SequenceOfEncoder(), + univ.Choice.tagSet: ChoiceEncoder(), + # character string types + char.UTF8String.tagSet: TextStringEncoder(), + char.NumericString.tagSet: TextStringEncoder(), + char.PrintableString.tagSet: TextStringEncoder(), + char.TeletexString.tagSet: TextStringEncoder(), + char.VideotexString.tagSet: TextStringEncoder(), + char.IA5String.tagSet: TextStringEncoder(), + char.GraphicString.tagSet: TextStringEncoder(), + char.VisibleString.tagSet: TextStringEncoder(), + char.GeneralString.tagSet: TextStringEncoder(), + char.UniversalString.tagSet: TextStringEncoder(), + char.BMPString.tagSet: TextStringEncoder(), + # useful types + useful.ObjectDescriptor.tagSet: OctetStringEncoder(), + useful.GeneralizedTime.tagSet: OctetStringEncoder(), + useful.UTCTime.tagSet: OctetStringEncoder() +} + +# Type-to-codec map for ambiguous ASN.1 types +typeMap = { + univ.Set.typeId: SetEncoder(), + univ.SetOf.typeId: SequenceOfEncoder(), + univ.Sequence.typeId: SequenceEncoder(), + univ.SequenceOf.typeId: SequenceOfEncoder(), + univ.Choice.typeId: ChoiceEncoder(), + univ.Any.typeId: AnyEncoder() +} + + +class Encoder(object): + + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap + + def __call__(self, asn1Value): + if not isinstance(asn1Value, base.Asn1Item): + raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)') + + if debug.logger & debug.flagEncoder: + debug.scope.push(type(asn1Value).__name__) + debug.logger('encoder called for type %s <%s>' % (type(asn1Value).__name__, asn1Value.prettyPrint())) + + tagSet = asn1Value.tagSet + if len(tagSet) > 1: + concreteEncoder = explicitlyTaggedItemEncoder + else: + if asn1Value.typeId is not None and asn1Value.typeId in self.__typeMap: + concreteEncoder = self.__typeMap[asn1Value.typeId] + elif tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + tagSet = asn1Value.baseTagSet + if tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + raise error.PyAsn1Error('No encoder for %s' % (asn1Value,)) + + debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (type(concreteEncoder).__name__, tagSet)) + + pyObject = concreteEncoder.encode(self, asn1Value) + + if debug.logger & debug.flagEncoder: + debug.logger('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject))) + debug.scope.pop() + + return pyObject + + +#: Turns an ASN.1 object into Python built-in type object(s). +#: +#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative), +#: walks all its components recursively and produces a Python built-in type or a tree +#: of those. +#: +#: One exception is that instead of :py:class:`dict`, an :py:class:`OrderedDict` +#: can be produced (whenever available) to preserve ordering of the components +#: of an ASN.1 SEQUENCE. +#: +#: Parameters +#: ---------- +#: asn1Value: any pyasn1 object (e.g. 
:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: pyasn1 object to encode (or a tree of them) +#: +#: Returns +#: ------- +#: : :py:class:`object` +#: Python built-in type instance (or a tree of them) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors +encode = Encoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/compat/__init__.py b/src/lib/pyasn1/compat/__init__.py new file mode 100644 index 00000000..8c3066b2 --- /dev/null +++ b/src/lib/pyasn1/compat/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/src/lib/pyasn1/compat/binary.py b/src/lib/pyasn1/compat/binary.py new file mode 100644 index 00000000..65c42c74 --- /dev/null +++ b/src/lib/pyasn1/compat/binary.py @@ -0,0 +1,25 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from sys import version_info + +if version_info[0:2] < (2, 6): + def bin(value): + bitstring = [] + + while value: + if value & 1 == 1: + bitstring.append('1') + else: + bitstring.append('0') + + value >>= 1 + + bitstring.reverse() + + return '0b' + ''.join(bitstring) +else: + bin = bin diff --git a/src/lib/pyasn1/compat/integer.py b/src/lib/pyasn1/compat/integer.py new file mode 100644 index 00000000..ae9c7e1d --- /dev/null +++ b/src/lib/pyasn1/compat/integer.py @@ -0,0 +1,96 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +if sys.version_info[0:2] < (3, 2): + from binascii import a2b_hex, b2a_hex +from pyasn1.compat.octets import oct2int, null + +if sys.version_info[0:2] < (3, 2): + def from_bytes(octets, signed=False): + value = long(b2a_hex(str(octets)), 16) + + if signed and oct2int(octets[0]) & 0x80: + return value - (1 << len(octets) * 8) + + return value + + def to_bytes(value, signed=False, length=0): + if value < 0: + if signed: + bits = bitLength(value) + + # two's complement form + maxValue = 1 << bits + valueToEncode = (value + maxValue) % maxValue + + else: + raise OverflowError('can\'t convert negative int to unsigned') + elif value == 0 and length == 0: + return null + else: + bits = 0 + valueToEncode = value + + hexValue = hex(valueToEncode)[2:] + if hexValue.endswith('L'): + hexValue = hexValue[:-1] + + if len(hexValue) & 1: + hexValue = '0' + hexValue + + # padding may be needed for two's complement encoding + if value != valueToEncode or length: + hexLength = len(hexValue) * 4 + + padLength = max(length, bits) + + if padLength > hexLength: + hexValue = '00' * ((padLength - hexLength - 1) // 8 + 1) + hexValue + elif length and hexLength - length > 7: + raise OverflowError('int too big to convert') + + firstOctet = int(hexValue[:2], 16) + + if signed: + if firstOctet & 0x80: + if value >= 0: + hexValue = '00' + hexValue + elif value < 0: + hexValue = 'ff' + hexValue + + octets_value = a2b_hex(hexValue) + + return octets_value + + def bitLength(number): + # bits in unsigned number + hexValue = hex(abs(number)) + bits = len(hexValue) - 2 + if hexValue.endswith('L'): + bits -= 1 + if bits & 1: + bits += 1 + bits *= 4 + # TODO: strip lhs zeros + return bits + +else: + + def from_bytes(octets, signed=False): + return int.from_bytes(bytes(octets), 'big', signed=signed) + + def to_bytes(value, signed=False, length=0): + length = max(value.bit_length(), length) + + if signed and length % 8 == 0: + length += 1 + + return value.to_bytes(length // 8 + (length % 8 and 1 or 
0), 'big', signed=signed) + + def bitLength(number): + return int(number).bit_length() + diff --git a/src/lib/pyasn1/compat/octets.py b/src/lib/pyasn1/compat/octets.py new file mode 100644 index 00000000..ec497a68 --- /dev/null +++ b/src/lib/pyasn1/compat/octets.py @@ -0,0 +1,46 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from sys import version_info + +if version_info[0] <= 2: + int2oct = chr + # noinspection PyPep8 + ints2octs = lambda s: ''.join([int2oct(x) for x in s]) + null = '' + oct2int = ord + # noinspection PyPep8 + octs2ints = lambda s: [oct2int(x) for x in s] + # noinspection PyPep8 + str2octs = lambda x: x + # noinspection PyPep8 + octs2str = lambda x: x + # noinspection PyPep8 + isOctetsType = lambda s: isinstance(s, str) + # noinspection PyPep8 + isStringType = lambda s: isinstance(s, (str, unicode)) + # noinspection PyPep8 + ensureString = str +else: + ints2octs = bytes + # noinspection PyPep8 + int2oct = lambda x: ints2octs((x,)) + null = ints2octs() + # noinspection PyPep8 + oct2int = lambda x: x + # noinspection PyPep8 + octs2ints = lambda x: x + # noinspection PyPep8 + str2octs = lambda x: x.encode('iso-8859-1') + # noinspection PyPep8 + octs2str = lambda x: x.decode('iso-8859-1') + # noinspection PyPep8 + isOctetsType = lambda s: isinstance(s, bytes) + # noinspection PyPep8 + isStringType = lambda s: isinstance(s, str) + # noinspection PyPep8 + ensureString = bytes + diff --git a/src/lib/pyasn1/debug.py b/src/lib/pyasn1/debug.py new file mode 100644 index 00000000..04a9da5c --- /dev/null +++ b/src/lib/pyasn1/debug.py @@ -0,0 +1,130 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import logging +from pyasn1.compat.octets import octs2ints +from pyasn1 import error +from pyasn1 import __version__ + +__all__ = ['Debug', 'setLogger', 'hexdump'] + +flagNone = 0x0000 +flagEncoder = 0x0001 +flagDecoder = 0x0002 +flagAll = 0xffff + +flagMap = { + 'encoder': flagEncoder, + 'decoder': flagDecoder, + 'all': flagAll +} + + +class Printer(object): + # noinspection PyShadowingNames + def __init__(self, logger=None, handler=None, formatter=None): + if logger is None: + logger = logging.getLogger('pyasn1') + logger.setLevel(logging.DEBUG) + if handler is None: + handler = logging.StreamHandler() + if formatter is None: + formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') + handler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + logger.addHandler(handler) + self.__logger = logger + + def __call__(self, msg): + self.__logger.debug(msg) + + def __str__(self): + return '' + + +if hasattr(logging, 'NullHandler'): + NullHandler = logging.NullHandler +else: + # Python 2.6 and older + class NullHandler(logging.Handler): + def emit(self, record): + pass + + +class Debug(object): + defaultPrinter = None + + def __init__(self, *flags, **options): + self._flags = flagNone + if options.get('printer') is not None: + self._printer = options.get('printer') + elif self.defaultPrinter is not None: + self._printer = self.defaultPrinter + if 'loggerName' in options: + # route our logs to parent logger + self._printer = Printer( + logger=logging.getLogger(options['loggerName']), + handler=NullHandler() + ) + else: + self._printer = Printer() + self('running pyasn1 version %s' % __version__) + for f in flags: + inverse = f and f[0] in ('!', '~') + if inverse: + f = f[1:] + try: + if inverse: + 
self._flags &= ~flagMap[f] + else: + self._flags |= flagMap[f] + except KeyError: + raise error.PyAsn1Error('bad debug flag %s' % f) + + self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled')) + + def __str__(self): + return 'logger %s, flags %x' % (self._printer, self._flags) + + def __call__(self, msg): + self._printer(msg) + + def __and__(self, flag): + return self._flags & flag + + def __rand__(self, flag): + return flag & self._flags + + +logger = 0 + + +def setLogger(l): + global logger + logger = l + + +def hexdump(octets): + return ' '.join( + ['%s%.2X' % (n % 16 == 0 and ('\n%.5d: ' % n) or '', x) + for n, x in zip(range(len(octets)), octs2ints(octets))] + ) + + +class Scope(object): + def __init__(self): + self._list = [] + + def __str__(self): return '.'.join(self._list) + + def push(self, token): + self._list.append(token) + + def pop(self): + return self._list.pop() + + +scope = Scope() diff --git a/src/lib/pyasn1/error.py b/src/lib/pyasn1/error.py new file mode 100644 index 00000000..85308557 --- /dev/null +++ b/src/lib/pyasn1/error.py @@ -0,0 +1,18 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# + + +class PyAsn1Error(Exception): + pass + + +class ValueConstraintError(PyAsn1Error): + pass + + +class SubstrateUnderrunError(PyAsn1Error): + pass diff --git a/src/lib/pyasn1/type/__init__.py b/src/lib/pyasn1/type/__init__.py new file mode 100644 index 00000000..8c3066b2 --- /dev/null +++ b/src/lib/pyasn1/type/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/src/lib/pyasn1/type/base.py b/src/lib/pyasn1/type/base.py new file mode 100644 index 00000000..00c329c2 --- /dev/null +++ b/src/lib/pyasn1/type/base.py @@ -0,0 +1,617 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.type import constraint, tagmap, tag +from pyasn1 import error + +__all__ = ['Asn1Item', 'Asn1ItemBase', 'AbstractSimpleAsn1Item', 'AbstractConstructedAsn1Item'] + + +class Asn1Item(object): + @classmethod + def getTypeId(cls, increment=1): + try: + Asn1Item._typeCounter += increment + except AttributeError: + Asn1Item._typeCounter = increment + return Asn1Item._typeCounter + + +class Asn1ItemBase(Asn1Item): + #: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing + #: ASN.1 tag(s) associated with |ASN.1| type. + tagSet = tag.TagSet() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing constraints on initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = None + + def __init__(self, tagSet=None, subtypeSpec=None): + if tagSet is None: + self._tagSet = self.__class__.tagSet + else: + self._tagSet = tagSet + if subtypeSpec is None: + self._subtypeSpec = self.__class__.subtypeSpec + else: + self._subtypeSpec = subtypeSpec + + @property + def effectiveTagSet(self): + """For |ASN.1| type is equivalent to *tagSet* + """ + return self._tagSet # used by untagged types + + @property + def tagMap(self): + """Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping ASN.1 tags to ASN.1 objects within callee object. 
+ """ + try: + return self._tagMap + + except AttributeError: + self._tagMap = tagmap.TagMap({self._tagSet: self}) + return self._tagMap + + def isSameTypeWith(self, other, matchTags=True, matchConstraints=True): + """Examine |ASN.1| type for equality with other ASN.1 type. + + ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints + (:py:mod:`~pyasn1.type.constraint`) are examined when carrying + out ASN.1 types comparison. + + No Python inheritance relationship between PyASN1 objects is considered. + + Parameters + ---------- + other: a pyasn1 type object + Class instance representing ASN.1 type. + + Returns + ------- + : :class:`bool` + :class:`True` if *other* is |ASN.1| type, + :class:`False` otherwise. + """ + return self is other or \ + (not matchTags or + self._tagSet == other.tagSet) and \ + (not matchConstraints or + self._subtypeSpec == other.subtypeSpec) + + def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True): + """Examine |ASN.1| type for subtype relationship with other ASN.1 type. + + ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints + (:py:mod:`~pyasn1.type.constraint`) are examined when carrying + out ASN.1 types comparison. + + No Python inheritance relationship between PyASN1 objects is considered. + + + Parameters + ---------- + other: a pyasn1 type object + Class instance representing ASN.1 type. + + Returns + ------- + : :class:`bool` + :class:`True` if *other* is a subtype of |ASN.1| type, + :class:`False` otherwise. + """ + return (not matchTags or + self._tagSet.isSuperTagSetOf(other.tagSet)) and \ + (not matchConstraints or + (self._subtypeSpec.isSuperTypeOf(other.subtypeSpec))) + + @staticmethod + def isNoValue(*values): + for value in values: + if value is not None and value is not noValue: + return False + return True + + # backward compatibility + + def getTagSet(self): + return self.tagSet + + def getEffectiveTagSet(self): + return self.effectiveTagSet + + def getTagMap(self): + return self.tagMap + + def getSubtypeSpec(self): + return self.subtypeSpec + + +class NoValue(object): + """Create a singleton instance of NoValue class. + + NoValue object can be used as an initializer on PyASN1 type class + instantiation to represent ASN.1 type rather than ASN.1 data value. + + No operations other than type comparison can be performed on + a PyASN1 type object. + """ + skipMethods = ('__getattribute__', '__getattr__', '__setattr__', '__delattr__', + '__class__', '__init__', '__del__', '__new__', '__repr__', + '__qualname__', '__objclass__', 'im_class', '__sizeof__') + + _instance = None + + def __new__(cls): + if cls._instance is None: + def getPlug(name): + def plug(self, *args, **kw): + raise error.PyAsn1Error('Uninitialized ASN.1 value ("%s" attribute looked up)' % name) + return plug + + op_names = [name + for typ in (str, int, list, dict) + for name in dir(typ) + if name not in cls.skipMethods and name.startswith('__') and name.endswith('__') and callable(getattr(typ, name))] + + for name in set(op_names): + setattr(cls, name, getPlug(name)) + + cls._instance = object.__new__(cls) + + return cls._instance + + def __getattr__(self, attr): + if attr in self.skipMethods: + raise AttributeError('attribute %s not present' % attr) + raise error.PyAsn1Error('No value for "%s"' % attr) + + def __repr__(self): + return '%s()' % self.__class__.__name__ + +noValue = NoValue() + + +# Base class for "simple" ASN.1 objects. These are immutable. 
+class AbstractSimpleAsn1Item(Asn1ItemBase): + #: Default payload value + defaultValue = noValue + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None): + Asn1ItemBase.__init__(self, tagSet, subtypeSpec) + if value is None or value is noValue: + value = self.defaultValue + else: + value = self.prettyIn(value) + try: + self._subtypeSpec(value) + + except error.PyAsn1Error: + exType, exValue, exTb = sys.exc_info() + raise exType('%s at %s' % (exValue, self.__class__.__name__)) + + self.__hashedValue = None + self._value = value + self._len = None + + def __repr__(self): + representation = [] + if self._value is not self.defaultValue: + representation.append(self.prettyOut(self._value)) + if self._tagSet is not self.__class__.tagSet: + representation.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + representation.append('subtypeSpec=%r' % (self._subtypeSpec,)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(representation)) + + def __str__(self): + return str(self._value) + + def __eq__(self, other): + return self is other and True or self._value == other + + def __ne__(self, other): + return self._value != other + + def __lt__(self, other): + return self._value < other + + def __le__(self, other): + return self._value <= other + + def __gt__(self, other): + return self._value > other + + def __ge__(self, other): + return self._value >= other + + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self._value and True or False + else: + def __bool__(self): + return self._value and True or False + + def __hash__(self): + if self.__hashedValue is None: + self.__hashedValue = hash(self._value) + return self.__hashedValue + + @property + def isValue(self): + """Indicate if |ASN.1| object represents ASN.1 type or ASN.1 value. + + The PyASN1 type objects can only participate in types comparison + and serve as a blueprint for serialization codecs to resolve + ambiguous types. + + The PyASN1 value objects can additionally participate in most + of built-in Python operations. + + Returns + ------- + : :class:`bool` + :class:`True` if object represents ASN.1 value and type, + :class:`False` if object represents just ASN.1 type. + + """ + return self._value is not noValue + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`tuple`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. 
+ + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: + value = self._value + else: + isModified = True + if tagSet is None or tagSet is noValue: + tagSet = self._tagSet + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec) + else: + return self + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`tuple`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: + value = self._value + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: + tagSet = self._tagSet.tagImplicitly(implicitTag) + isModified = True + elif explicitTag is not None and explicitTag is not noValue: + tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True + else: + tagSet = self._tagSet + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec) + else: + return self + + def prettyIn(self, value): + return value + + def prettyOut(self, value): + return str(value) + + def prettyPrint(self, scope=0): + """Provide human-friendly printable object representation. + + Returns + ------- + : :class:`str` + human-friendly type and/or value representation. + """ + if self.isValue: + return self.prettyOut(self._value) + else: + return '' + + # XXX Compatibility stub + def prettyPrinter(self, scope=0): + return self.prettyPrint(scope) + + # noinspection PyUnusedLocal + def prettyPrintType(self, scope=0): + return '%s -> %s' % (self.tagSet, self.__class__.__name__) + + # backward compatibility + + def hasValue(self): + return self.isValue + + +# +# Constructed types: +# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice +# * ASN1 types and values are represened by Python class instances +# * Value initialization is made for defaulted components only +# * Primary method of component addressing is by-position. 
Data model for base
+#   type is Python sequence. Additional type-specific addressing methods
+#   may be implemented for particular types.
+# * SequenceOf and SetOf types do not implement any additional methods
+# * Sequence, Set and Choice types also implement by-identifier addressing
+# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing
+# * Sequence and Set types may include optional and defaulted
+#   components
+# * Constructed types hold a reference to component types used for value
+#   verification and ordering.
+# * Component type is a scalar type for SequenceOf/SetOf types and a list
+#   of types for Sequence/Set/Choice.
+#
+
+def setupComponent():
+    """Returns a sentinel value.
+
+    Indicates to a constructed type to set up its inner component so that it
+    can be referred to. This is useful in situations when you want to populate
+    descendants of a constructed type, which requires being able to refer to
+    their parent types along the way.
+
+    Example
+    -------
+
+    >>> constructed['record'] = setupComponent()
+    >>> constructed['record']['scalar'] = 42
+    """
+    return noValue
+
+
+class AbstractConstructedAsn1Item(Asn1ItemBase):
+
+    #: If `True`, requires exact component type matching,
+    #: otherwise subtype relation is only enforced
+    strictConstraints = False
+
+    def __init__(self, componentType=None, tagSet=None,
+                 subtypeSpec=None, sizeSpec=None):
+        Asn1ItemBase.__init__(self, tagSet, subtypeSpec)
+        if componentType is None:
+            self._componentType = self.componentType
+        else:
+            self._componentType = componentType
+        if sizeSpec is None:
+            self._sizeSpec = self.sizeSpec
+        else:
+            self._sizeSpec = sizeSpec
+        self._componentValues = []
+
+    def __repr__(self):
+        representation = []
+        if self._componentType is not self.componentType:
+            representation.append('componentType=%r' % (self._componentType,))
+        if self._tagSet is not self.__class__.tagSet:
+            representation.append('tagSet=%r' % (self._tagSet,))
+        if self._subtypeSpec is not self.subtypeSpec:
+            representation.append('subtypeSpec=%r' % (self._subtypeSpec,))
+        representation = '%s(%s)' % (self.__class__.__name__, ', '.join(representation))
+        if self._componentValues:
+            for idx, component in enumerate(self._componentValues):
+                if component is None or component is noValue:
+                    continue
+                representation += '.setComponentByPosition(%d, %s)' % (idx, repr(component))
+        return representation
+
+    def __eq__(self, other):
+        return self is other and True or self._componentValues == other
+
+    def __ne__(self, other):
+        return self._componentValues != other
+
+    def __lt__(self, other):
+        return self._componentValues < other
+
+    def __le__(self, other):
+        return self._componentValues <= other
+
+    def __gt__(self, other):
+        return self._componentValues > other
+
+    def __ge__(self, other):
+        return self._componentValues >= other
+
+    if sys.version_info[0] <= 2:
+        def __nonzero__(self):
+            return self._componentValues and True or False
+    else:
+        def __bool__(self):
+            return self._componentValues and True or False
+
+    def _cloneComponentValues(self, myClone, cloneValueFlag):
+        pass
+
+    def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None, cloneValueFlag=None):
+        """Create a copy of a |ASN.1| type or object.
+
+        Any parameters to the *clone()* method will replace corresponding
+        properties of the |ASN.1| object.
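+
+        Example
+        -------
+        A hedged sketch (editor's addition; *seq* stands for any initialized
+        Sequence object):
+
+        .. code-block:: python
+
+            # deep-copy the object, including its component values
+            seqCopy = seq.clone(cloneValueFlag=True)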
+ + Parameters + ---------- + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 size constraint(s) + + Returns + ------- + : + new instance of |ASN.1| type/value + + """ + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if sizeSpec is None: + sizeSpec = self._sizeSpec + clone = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + if cloneValueFlag: + self._cloneComponentValues(clone, cloneValueFlag) + return clone + + def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None, + sizeSpec=None, cloneValueFlag=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 size constraint(s) + + Returns + ------- + : + new instance of |ASN.1| type/value + + """ + if implicitTag is not None and implicitTag is not noValue: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None and explicitTag is not noValue: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = self._subtypeSpec + subtypeSpec + if sizeSpec is None or sizeSpec is noValue: + sizeSpec = self._sizeSpec + else: + sizeSpec += self._sizeSpec + clone = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + if cloneValueFlag: + self._cloneComponentValues(clone, cloneValueFlag) + return clone + + def verifySizeSpec(self): + self._sizeSpec(self) + + def getComponentByPosition(self, idx): + raise error.PyAsn1Error('Method not implemented') + + def setComponentByPosition(self, idx, value, verifyConstraints=True): + raise error.PyAsn1Error('Method not implemented') + + def setComponents(self, *args, **kwargs): + for idx, value in enumerate(args): + self[idx] = value + for k in kwargs: + self[k] = kwargs[k] + return self + + def getComponentType(self): + return self._componentType + + # backward compatibility -- no-op + def setDefaultComponents(self): + pass + + @property + def componentTagMap(self): + raise error.PyAsn1Error('Method not implemented') + + def __getitem__(self, idx): + return self.getComponentByPosition(idx) + + def __setitem__(self, idx, value): + self.setComponentByPosition(idx, value) + + def __len__(self): + return len(self._componentValues) + + def clear(self): + self._componentValues = [] + + # backward compatibility + def getComponentTagMap(self): + return self.componentTagMap \ No newline at end of file diff --git a/src/lib/pyasn1/type/char.py b/src/lib/pyasn1/type/char.py new file mode 100644 index 00000000..039e5366 --- /dev/null +++ b/src/lib/pyasn1/type/char.py @@ -0,0 +1,378 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.type import univ, tag +from pyasn1 import error + + +__all__ = ['NumericString', 'PrintableString', 'TeletexString', 'T61String', 'VideotexString', + 'IA5String', 'GraphicString', 'VisibleString', 'ISO646String', + 'GeneralString', 'UniversalString', 'BMPString', 'UTF8String'] + +NoValue = univ.NoValue +noValue = univ.noValue + + +class AbstractCharacterString(univ.OctetString): + """Creates |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python 2 :class:`unicode` or Python 3 :class:`str`. + When used in octet-stream context, |ASN.1| type assumes "|encoding|" encoding. + + Parameters + ---------- + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + unicode object (Python 2) or string (Python 3), alternatively string + (Python 2) or bytes (Python 3) representing octet-stream of serialized + unicode string (note `encoding` parameter) or |ASN.1| class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) or + :class:`str` (Python 3) the payload when |ASN.1| object is used + in octet-stream context. + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + + if sys.version_info[0] <= 2: + def __str__(self): + try: + return self._value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (self._value, self._encoding) + ) + + def __unicode__(self): + return unicode(self._value) + + def prettyIn(self, value): + if isinstance(value, unicode): + return value + elif isinstance(value, str): + try: + return value.decode(self._encoding) + except (LookupError, UnicodeDecodeError): + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, (tuple, list)): + try: + return self.prettyIn(''.join([chr(x) for x in value])) + except ValueError: + raise error.PyAsn1Error( + 'Bad %s initializer \'%s\'' % (self.__class__.__name__, value) + ) + else: + try: + return unicode(value) + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t turn object \'%s\' into unicode' % (value,) + ) + + def asOctets(self, padding=True): + return str(self) + + def asNumbers(self, padding=True): + return tuple([ord(x) for x in str(self)]) + + else: + def __str__(self): + return str(self._value) + + def __bytes__(self): + try: + return self._value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (self._value, self._encoding) + ) + + def prettyIn(self, value): + if isinstance(value, str): + return value + elif isinstance(value, bytes): + try: + return value.decode(self._encoding) + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, (tuple, list)): + return self.prettyIn(bytes(value)) + else: + try: + return str(value) + except (UnicodeDecodeError, ValueError): + raise error.PyAsn1Error( + 'Can\'t turn object \'%s\' into unicode' % (value,) + ) + + def asOctets(self, padding=True): + return 
bytes(self) + + def asNumbers(self, padding=True): + return tuple(bytes(self)) + + def prettyOut(self, value): + return value + + def __reversed__(self): + return reversed(self._value) + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, + encoding=None, binValue=noValue, hexValue=noValue): + """Creates a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + unicode object (Python 2) or string (Python 3), alternatively string + (Python 2) or bytes (Python 3) representing octet-stream of serialized + unicode string (note `encoding` parameter) or |ASN.1| class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :py:class:`unicode` (Python 2) or + :py:class:`str` (Python 3) the payload when |ASN.1| object is used + in octet-stream context. + + Returns + ------- + : + new instance of |ASN.1| type/value + + """ + return univ.OctetString.clone(self, value, tagSet, subtypeSpec, encoding, binValue, hexValue) + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None, encoding=None, binValue=noValue, hexValue=noValue): + """Creates a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + unicode object (Python 2) or string (Python 3), alternatively string + (Python 2) or bytes (Python 3) representing octet-stream of serialized + unicode string (note `encoding` parameter) or |ASN.1| class instance. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :py:class:`unicode` (Python 2) or + :py:class:`str` (Python 3) the payload when |ASN.1| object is used + in octet-stream context. + + Returns + ------- + : + new instance of |ASN.1| type/value + + """ + return univ.OctetString.subtype(self, value, implicitTag, explicitTag, subtypeSpec, encoding, binValue, hexValue) + + +class NumericString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class PrintableString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class TeletexString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20) + ) + encoding = 'iso-8859-1' + + +class T61String(TeletexString): + __doc__ = TeletexString.__doc__ + + +class VideotexString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21) + ) + encoding = 'iso-8859-1' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class IA5String(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class GraphicString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25) + ) + encoding = 'iso-8859-1' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class VisibleString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class ISO646String(VisibleString): + __doc__ = VisibleString.__doc__ + + +class GeneralString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27) + ) + encoding = 'iso-8859-1' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class UniversalString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28) + ) + encoding = "utf-32-be" + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class BMPString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30) + ) + encoding = "utf-16-be" + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class UTF8String(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12) + ) + encoding = "utf-8" + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() diff --git a/src/lib/pyasn1/type/constraint.py b/src/lib/pyasn1/type/constraint.py new file mode 100644 index 00000000..7f96c507 --- /dev/null +++ b/src/lib/pyasn1/type/constraint.py @@ -0,0 +1,283 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# Original concept and code by Mike C. Fletcher. +# +import sys +from pyasn1.type import error + +__all__ = ['SingleValueConstraint', 'ContainedSubtypeConstraint', 'ValueRangeConstraint', + 'ValueSizeConstraint', 'PermittedAlphabetConstraint', 'InnerTypeConstraint', + 'ConstraintsExclusion', 'ConstraintsIntersection', 'ConstraintsUnion'] + + +class AbstractConstraint(object): + """Abstract base-class for constraint objects + + Constraints should be stored in a simple sequence in the + namespace of their client Asn1Item sub-classes in cases + when ASN.1 constraint is define. 
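+
+    Example
+    -------
+    A hedged illustration (editor's addition, not upstream text): constraint
+    objects are callable and raise an error on violation:
+
+    .. code-block:: python
+
+        spec = SingleValueConstraint(1, 2, 3)
+        spec(2)   # passes silently
+        spec(4)   # raises error.ValueConstraintError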
+ """ + + def __init__(self, *values): + self._valueMap = set() + self._setValues(values) + self.__hashedValues = None + + def __call__(self, value, idx=None): + if not self._values: + return + + try: + self._testValue(value, idx) + + except error.ValueConstraintError: + raise error.ValueConstraintError( + '%s failed at: %r' % (self, sys.exc_info()[1]) + ) + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + ', '.join([repr(x) for x in self._values]) + ) + + def __eq__(self, other): + return self is other and True or self._values == other + + def __ne__(self, other): + return self._values != other + + def __lt__(self, other): + return self._values < other + + def __le__(self, other): + return self._values <= other + + def __gt__(self, other): + return self._values > other + + def __ge__(self, other): + return self._values >= other + + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self._values and True or False + else: + def __bool__(self): + return self._values and True or False + + def __hash__(self): + if self.__hashedValues is None: + self.__hashedValues = hash((self.__class__.__name__, self._values)) + return self.__hashedValues + + # descriptor protocol + + def __get__(self, instance, owner): + if instance is None: + return self + + # This is a bit of hack: look up instance attribute first, + # then try class attribute if instance attribute with that + # name is not available. + # The rationale is to have `.subtypeSpec`/`.sizeSpec` readable-writeable + # as a class attribute and read-only as instance attribute. + try: + return instance._subtypeSpec + + except AttributeError: + try: + return instance._sizeSpec + + except AttributeError: + return self + + def __set__(self, instance, value): + raise AttributeError('attribute is read-only') + + def _setValues(self, values): + self._values = values + + def _testValue(self, value, idx): + raise error.ValueConstraintError(value) + + # Constraints derivation logic + def getValueMap(self): + return self._valueMap + + def isSuperTypeOf(self, otherConstraint): + return (otherConstraint is self or + not self._values or + otherConstraint == self or + self in otherConstraint.getValueMap()) + + def isSubTypeOf(self, otherConstraint): + return (otherConstraint is self or + not self or + otherConstraint == self or + otherConstraint in self._valueMap) + +class SingleValueConstraint(AbstractConstraint): + """Value must be part of defined values constraint""" + + def _setValues(self, values): + self._values = values + self._set = set(values) + + def _testValue(self, value, idx): + if value not in self._set: + raise error.ValueConstraintError(value) + + +class ContainedSubtypeConstraint(AbstractConstraint): + """Value must satisfy all of defined set of constraints""" + + def _testValue(self, value, idx): + for c in self._values: + c(value, idx) + + +class ValueRangeConstraint(AbstractConstraint): + """Value must be within start and stop values (inclusive)""" + + def _testValue(self, value, idx): + if value < self.start or value > self.stop: + raise error.ValueConstraintError(value) + + def _setValues(self, values): + if len(values) != 2: + raise error.PyAsn1Error( + '%s: bad constraint values' % (self.__class__.__name__,) + ) + self.start, self.stop = values + if self.start > self.stop: + raise error.PyAsn1Error( + '%s: screwed constraint values (start > stop): %s > %s' % ( + self.__class__.__name__, + self.start, self.stop + ) + ) + AbstractConstraint._setValues(self, values) + + +class 
ValueSizeConstraint(ValueRangeConstraint):
+    """len(value) must be within start and stop values (inclusive)"""
+
+    def _testValue(self, value, idx):
+        valueSize = len(value)
+        if valueSize < self.start or valueSize > self.stop:
+            raise error.ValueConstraintError(value)
+
+
+class PermittedAlphabetConstraint(SingleValueConstraint):
+    def _setValues(self, values):
+        self._values = values
+        self._set = set(values)
+
+    def _testValue(self, value, idx):
+        if not self._set.issuperset(value):
+            raise error.ValueConstraintError(value)
+
+
+# This is a bit kludgy, meaning two op modes within a single constraint
+class InnerTypeConstraint(AbstractConstraint):
+    """Value must satisfy type and presence constraints"""
+
+    def _testValue(self, value, idx):
+        if self.__singleTypeConstraint:
+            self.__singleTypeConstraint(value)
+        elif self.__multipleTypeConstraint:
+            if idx not in self.__multipleTypeConstraint:
+                raise error.ValueConstraintError(value)
+            constraint, status = self.__multipleTypeConstraint[idx]
+            if status == 'ABSENT':  # XXX presence is not checked!
+                raise error.ValueConstraintError(value)
+            constraint(value)
+
+    def _setValues(self, values):
+        self.__multipleTypeConstraint = {}
+        self.__singleTypeConstraint = None
+        for v in values:
+            if isinstance(v, tuple):
+                self.__multipleTypeConstraint[v[0]] = v[1], v[2]
+            else:
+                self.__singleTypeConstraint = v
+        AbstractConstraint._setValues(self, values)
+
+
+# Boolean ops on constraints
+
+class ConstraintsExclusion(AbstractConstraint):
+    """Value must not fit the single constraint"""
+
+    def _testValue(self, value, idx):
+        try:
+            self._values[0](value, idx)
+        except error.ValueConstraintError:
+            return
+        else:
+            raise error.ValueConstraintError(value)
+
+    def _setValues(self, values):
+        if len(values) != 1:
+            raise error.PyAsn1Error('Single constraint expected')
+        AbstractConstraint._setValues(self, values)
+
+
+class AbstractConstraintSet(AbstractConstraint):
+    """Abstract base-class for collections of constraints"""
+
+    def __getitem__(self, idx):
+        return self._values[idx]
+
+    def __iter__(self):
+        return iter(self._values)
+
+    def __add__(self, value):
+        return self.__class__(*(self._values + (value,)))
+
+    def __radd__(self, value):
+        return self.__class__(*((value,) + self._values))
+
+    def __len__(self):
+        return len(self._values)
+
+    # Constraints inclusion in sets
+
+    def _setValues(self, values):
+        self._values = values
+        for constraint in values:
+            if constraint:
+                self._valueMap.add(constraint)
+                self._valueMap.update(constraint.getValueMap())
+
+
+class ConstraintsIntersection(AbstractConstraintSet):
+    """Value must satisfy all constraints"""
+
+    def _testValue(self, value, idx):
+        for constraint in self._values:
+            constraint(value, idx)
+
+
+class ConstraintsUnion(AbstractConstraintSet):
+    """Value must satisfy at least one constraint"""
+
+    def _testValue(self, value, idx):
+        for constraint in self._values:
+            try:
+                constraint(value, idx)
+            except error.ValueConstraintError:
+                pass
+            else:
+                return
+        raise error.ValueConstraintError(
+            'all of %s failed for \"%s\"' % (self._values, value)
+        )
+
+# XXX
+# add tests for type check
diff --git a/src/lib/pyasn1/type/error.py b/src/lib/pyasn1/type/error.py
new file mode 100644
index 00000000..cbfa276a
--- /dev/null
+++ b/src/lib/pyasn1/type/error.py
@@ -0,0 +1,11 @@
+#
+# This file is part of pyasn1 software.
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.error import PyAsn1Error + + +class ValueConstraintError(PyAsn1Error): + pass diff --git a/src/lib/pyasn1/type/namedtype.py b/src/lib/pyasn1/type/namedtype.py new file mode 100644 index 00000000..3f9ae190 --- /dev/null +++ b/src/lib/pyasn1/type/namedtype.py @@ -0,0 +1,475 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +from pyasn1.type import tagmap +from pyasn1 import error + +__all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType', 'NamedTypes'] + + +class NamedType(object): + """Create named field object for a constructed ASN.1 type. + + The |NamedType| object represents a single name and ASN.1 type of a constructed ASN.1 type. + + |NamedType| objects are immutable and duck-type Python :class:`tuple` objects + holding *name* and *asn1Object* components. + + Parameters + ---------- + name: :py:class:`str` + Field name + + asn1Object: + ASN.1 type object + """ + isOptional = False + isDefaulted = False + + def __init__(self, name, asn1Object): + self.__name = name + self.__type = asn1Object + self.__nameAndType = name, asn1Object + + def __repr__(self): + return '%s(%r, %r)' % (self.__class__.__name__, self.__name, self.__type) + + def __eq__(self, other): + return self.__nameAndType == other + + def __ne__(self, other): + return self.__nameAndType != other + + def __lt__(self, other): + return self.__nameAndType < other + + def __le__(self, other): + return self.__nameAndType <= other + + def __gt__(self, other): + return self.__nameAndType > other + + def __ge__(self, other): + return self.__nameAndType >= other + + def __hash__(self): + return hash(self.__nameAndType) + + def __getitem__(self, idx): + return self.__nameAndType[idx] + + def __iter__(self): + return iter(self.__nameAndType) + + @property + def name(self): + return self.__name + + @property + def asn1Object(self): + return self.__type + + # Backward compatibility + + def getName(self): + return self.name + + def getType(self): + return self.asn1Object + + +class OptionalNamedType(NamedType): + __doc__ = NamedType.__doc__ + + isOptional = True + + +class DefaultedNamedType(NamedType): + __doc__ = NamedType.__doc__ + + isDefaulted = True + + +class NamedTypes(object): + """Create a collection of named fields for a constructed ASN.1 type. + + The NamedTypes object represents a collection of named fields of a constructed ASN.1 type. + + *NamedTypes* objects are immutable and duck-type Python :class:`dict` objects + holding *name* as keys and ASN.1 type object as values. 
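+
+    Example
+    -------
+    A hedged sketch (editor's addition; assumes the univ and char modules
+    defined elsewhere in this library):
+
+    .. code-block:: python
+
+        fields = NamedTypes(
+            NamedType('id', univ.Integer()),
+            OptionalNamedType('nickname', char.UTF8String())
+        )
+
+        fields.getPositionByName('id')   # -> 0
+        fields.getNameByPosition(1)      # -> 'nickname'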
+ + Parameters + ---------- + *namedTypes: :class:`~pyasn1.type.namedtype.NamedType` + """ + def __init__(self, *namedTypes): + self.__namedTypes = namedTypes + self.__namedTypesLen = len(self.__namedTypes) + self.__minTagSet = None + self.__tagToPosMapImpl = None + self.__nameToPosMapImpl = None + self.__ambigiousTypesImpl = None + self.__tagMap = {} + self.__hasOptionalOrDefault = None + self.__requiredComponents = None + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, ', '.join([repr(x) for x in self.__namedTypes]) + ) + + def __eq__(self, other): + return self.__namedTypes == other + + def __ne__(self, other): + return self.__namedTypes != other + + def __lt__(self, other): + return self.__namedTypes < other + + def __le__(self, other): + return self.__namedTypes <= other + + def __gt__(self, other): + return self.__namedTypes > other + + def __ge__(self, other): + return self.__namedTypes >= other + + def __hash__(self): + return hash(self.__namedTypes) + + def __getitem__(self, idx): + try: + return self.__namedTypes[idx] + + except TypeError: + return self.__namedTypes[self.__nameToPosMap[idx]] + + def __contains__(self, key): + return key in self.__nameToPosMap + + def __iter__(self): + return (x[0] for x in self.__namedTypes) + + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self.__namedTypesLen > 0 + else: + def __bool__(self): + return self.__namedTypesLen > 0 + + def __len__(self): + return self.__namedTypesLen + + # Python dict protocol + + def values(self): + return (namedType.asn1Object for namedType in self.__namedTypes) + + def keys(self): + return (namedType.name for namedType in self.__namedTypes) + + def items(self): + return ((namedType.name, namedType.asn1Object) for namedType in self.__namedTypes) + + def clone(self): + return self.__class__(*self.__namedTypes) + + @property + def __tagToPosMap(self): + if self.__tagToPosMapImpl is None: + self.__tagToPosMapImpl = {} + for idx, namedType in enumerate(self.__namedTypes): + tagMap = namedType.asn1Object.tagMap + if not tagMap: + continue + for _tagSet in tagMap.presentTypes: + if _tagSet in self.__tagToPosMapImpl: + raise error.PyAsn1Error('Duplicate type %s in %s' % (_tagSet, namedType)) + self.__tagToPosMapImpl[_tagSet] = idx + + return self.__tagToPosMapImpl + + @property + def __nameToPosMap(self): + if self.__nameToPosMapImpl is None: + self.__nameToPosMapImpl = {} + for idx, namedType in enumerate(self.__namedTypes): + if namedType.name in self.__nameToPosMapImpl: + raise error.PyAsn1Error('Duplicate name %s in %s' % (namedType.name, namedType)) + self.__nameToPosMapImpl[namedType.name] = idx + + return self.__nameToPosMapImpl + + @property + def __ambigiousTypes(self): + if self.__ambigiousTypesImpl is None: + self.__ambigiousTypesImpl = {} + ambigiousTypes = () + for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))): + if namedType.isOptional or namedType.isDefaulted: + ambigiousTypes = (namedType,) + ambigiousTypes + else: + ambigiousTypes = (namedType,) + self.__ambigiousTypesImpl[idx] = NamedTypes(*ambigiousTypes) + return self.__ambigiousTypesImpl + + def getTypeByPosition(self, idx): + """Return ASN.1 type object by its position in fields set. 
+
+        Parameters
+        ----------
+        idx: :py:class:`int`
+            Field index
+
+        Returns
+        -------
+        :
+            ASN.1 type
+
+        Raises
+        ------
+        : :class:`~pyasn1.error.PyAsn1Error`
+            If given position is out of fields range
+        """
+        try:
+            return self.__namedTypes[idx].asn1Object
+
+        except IndexError:
+            raise error.PyAsn1Error('Type position out of range')
+
+    def getPositionByType(self, tagSet):
+        """Return field position by its ASN.1 type.
+
+        Parameters
+        ----------
+        tagSet: :class:`~pyasn1.type.tag.TagSet`
+            ASN.1 tag set distinguishing one ASN.1 type from others.
+
+        Returns
+        -------
+        : :py:class:`int`
+            ASN.1 type position in fields set
+
+        Raises
+        ------
+        : :class:`~pyasn1.error.PyAsn1Error`
+            If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes*
+        """
+        try:
+            return self.__tagToPosMap[tagSet]
+
+        except KeyError:
+            raise error.PyAsn1Error('Type %s not found' % (tagSet,))
+
+    def getNameByPosition(self, idx):
+        """Return field name by its position in fields set.
+
+        Parameters
+        ----------
+        idx: :py:class:`int`
+            Field index
+
+        Returns
+        -------
+        : :py:class:`str`
+            Field name
+
+        Raises
+        ------
+        : :class:`~pyasn1.error.PyAsn1Error`
+            If given position is out of fields range
+        """
+        try:
+            return self.__namedTypes[idx].name
+
+        except IndexError:
+            raise error.PyAsn1Error('Type position out of range')
+
+    def getPositionByName(self, name):
+        """Return field position by field name.
+
+        Parameters
+        ----------
+        name: :py:class:`str`
+            Field name
+
+        Returns
+        -------
+        : :py:class:`int`
+            Field position in fields set
+
+        Raises
+        ------
+        : :class:`~pyasn1.error.PyAsn1Error`
+            If *name* is not present or not unique within callee *NamedTypes*
+        """
+        try:
+            return self.__nameToPosMap[name]
+
+        except KeyError:
+            raise error.PyAsn1Error('Name %s not found' % (name,))
+
+    def getTagMapNearPosition(self, idx):
+        """Return ASN.1 types that are allowed at or past given field position.
+
+        Some ASN.1 serializations allow for skipping optional and defaulted fields.
+        Some constructed ASN.1 types allow reordering of the fields. When recovering
+        such objects it may be important to know which types can possibly be
+        present at any given position in the field sets.
+
+        Parameters
+        ----------
+        idx: :py:class:`int`
+            Field index
+
+        Returns
+        -------
+        : :class:`~pyasn1.type.tagmap.TagMap`
+            Map of ASN.1 types allowed at given field position
+
+        Raises
+        ------
+        : :class:`~pyasn1.error.PyAsn1Error`
+            If given position is out of fields range
+        """
+        try:
+            return self.__ambigiousTypes[idx].getTagMap()
+
+        except KeyError:
+            raise error.PyAsn1Error('Type position out of range')
+
+    def getPositionNearType(self, tagSet, idx):
+        """Return the closest field position where given ASN.1 type is allowed.
+
+        Some ASN.1 serializations allow for skipping optional and defaulted fields.
+        Some constructed ASN.1 types allow reordering of the fields. When recovering
+        such objects it may be important to know at which field position, in field set,
+        given *tagSet* is allowed at or past *idx* position.
+
+        Parameters
+        ----------
+        tagSet: :class:`~pyasn1.type.tag.TagSet`
+            ASN.1 type whose field position to look up
+
+        idx: :py:class:`int`
+            Field position at or past which to perform ASN.1 type look up
+
+        Returns
+        -------
+        : :py:class:`int`
+            Field position in fields set
+
+        Raises
+        ------
+        : :class:`~pyasn1.error.PyAsn1Error`
+            If *tagSet* is not present or not unique within callee *NamedTypes*
+            or *idx* is out of fields range
+        """
+        try:
+            return idx + self.__ambigiousTypes[idx].getPositionByType(tagSet)
+
+        except KeyError:
+            raise error.PyAsn1Error('Type position out of range')
+
+    @property
+    def minTagSet(self):
+        """Return the minimal TagSet among ASN.1 types in callee *NamedTypes*.
+
+        Some ASN.1 types/serialization protocols require ASN.1 types to be
+        arranged based on their numerical tag value. The *minTagSet* property
+        returns that minimal tag set.
+
+        Returns
+        -------
+        : :class:`~pyasn1.type.tag.TagSet`
+            Minimal TagSet among ASN.1 types in callee *NamedTypes*
+        """
+        if self.__minTagSet is None:
+            for namedType in self.__namedTypes:
+                asn1Object = namedType.asn1Object
+                try:
+                    tagSet = asn1Object.getMinTagSet()
+
+                except AttributeError:
+                    tagSet = asn1Object.tagSet
+                if self.__minTagSet is None or tagSet < self.__minTagSet:
+                    self.__minTagSet = tagSet
+        return self.__minTagSet
+
+    def getTagMap(self, unique=False):
+        """Create a *TagMap* object from tags and types recursively.
+
+        Create a new :class:`~pyasn1.type.tagmap.TagMap` object by
+        combining tags from *TagMap* objects of children types and
+        associating them with their immediate child type.
+
+        Example
+        -------
+
+        .. code-block:: python
+
+            OuterType ::= CHOICE {
+                innerType INTEGER
+            }
+
+        Calling *.getTagMap()* on *OuterType* will yield a map like this:
+
+        .. code-block:: python
+
+            Integer.tagSet -> Choice
+
+        Parameters
+        ----------
+        unique: :py:class:`bool`
+            If `True`, duplicate *TagSet* objects occurring while building
+            new *TagMap* would cause an error.
+
+        Returns
+        -------
+        : :class:`~pyasn1.type.tagmap.TagMap`
+            New *TagMap* holding *TagSet* objects gathered from children types.
+ """ + if unique not in self.__tagMap: + presentTypes = {} + skipTypes = {} + defaultType = None + for namedType in self.__namedTypes: + tagMap = namedType.asn1Object.tagMap + for tagSet in tagMap: + if unique and tagSet in presentTypes: + raise error.PyAsn1Error('Non-unique tagSet %s' % (tagSet,)) + presentTypes[tagSet] = namedType.asn1Object + skipTypes.update(tagMap.skipTypes) + + if defaultType is None: + defaultType = tagMap.defaultType + elif tagMap.defaultType is not None: + raise error.PyAsn1Error('Duplicate default ASN.1 type at %s' % (self,)) + + self.__tagMap[unique] = tagmap.TagMap(presentTypes, skipTypes, defaultType) + + return self.__tagMap[unique] + + @property + def hasOptionalOrDefault(self): + if self.__hasOptionalOrDefault is None: + self.__hasOptionalOrDefault = bool([True for namedType in self.__namedTypes if namedType.isDefaulted or namedType.isOptional]) + return self.__hasOptionalOrDefault + + @property + def namedTypes(self): + return iter(self.__namedTypes) + + @property + def requiredComponents(self): + if self.__requiredComponents is None: + self.__requiredComponents = frozenset( + [idx for idx, nt in enumerate(self.__namedTypes) if not nt.isOptional and not nt.isDefaulted] + ) + return self.__requiredComponents diff --git a/src/lib/pyasn1/type/namedval.py b/src/lib/pyasn1/type/namedval.py new file mode 100644 index 00000000..bcdbf153 --- /dev/null +++ b/src/lib/pyasn1/type/namedval.py @@ -0,0 +1,94 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +# ASN.1 named integers +# +from pyasn1 import error + +__all__ = ['NamedValues'] + + +class NamedValues(object): + def __init__(self, *namedValues): + self.nameToValIdx = {} + self.valToNameIdx = {} + self.namedValues = () + automaticVal = 1 + for namedValue in namedValues: + if isinstance(namedValue, tuple): + name, val = namedValue + else: + name = namedValue + val = automaticVal + if name in self.nameToValIdx: + raise error.PyAsn1Error('Duplicate name %s' % (name,)) + self.nameToValIdx[name] = val + if val in self.valToNameIdx: + raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val)) + self.valToNameIdx[val] = name + self.namedValues = self.namedValues + ((name, val),) + automaticVal += 1 + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues])) + + def __str__(self): + return str(self.namedValues) + + def __eq__(self, other): + return tuple(self) == tuple(other) + + def __ne__(self, other): + return tuple(self) != tuple(other) + + def __lt__(self, other): + return tuple(self) < tuple(other) + + def __le__(self, other): + return tuple(self) <= tuple(other) + + def __gt__(self, other): + return tuple(self) > tuple(other) + + def __ge__(self, other): + return tuple(self) >= tuple(other) + + def __hash__(self): + return hash(tuple(self)) + + def getName(self, value): + if value in self.valToNameIdx: + return self.valToNameIdx[value] + + def getValue(self, name): + if name in self.nameToValIdx: + return self.nameToValIdx[name] + + def getValues(self, *names): + try: + return [self.nameToValIdx[name] for name in names] + + except KeyError: + raise error.PyAsn1Error( + 'Unknown bit identifier(s): %s' % (set(names).difference(self.nameToValIdx),) + ) + + def __getitem__(self, i): + return self.namedValues[i] + + def __len__(self): + return len(self.namedValues) + + def __add__(self, namedValues): + return self.__class__(*self.namedValues + namedValues) + + def 
__radd__(self, namedValues): + return self.__class__(*namedValues + tuple(self)) + + def clone(self, *namedValues): + return self.__class__(*tuple(self) + namedValues) + +# XXX clone/subtype? diff --git a/src/lib/pyasn1/type/tag.py b/src/lib/pyasn1/type/tag.py new file mode 100644 index 00000000..aaf18572 --- /dev/null +++ b/src/lib/pyasn1/type/tag.py @@ -0,0 +1,342 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1 import error + +__all__ = ['tagClassUniversal', 'tagClassApplication', 'tagClassContext', + 'tagClassPrivate', 'tagFormatSimple', 'tagFormatConstructed', + 'tagCategoryImplicit', 'tagCategoryExplicit', 'tagCategoryUntagged', + 'Tag', 'TagSet'] + +#: Identifier for ASN.1 class UNIVERSAL +tagClassUniversal = 0x00 + +#: Identifier for ASN.1 class APPLICATION +tagClassApplication = 0x40 + +#: Identifier for ASN.1 class context-specific +tagClassContext = 0x80 + +#: Identifier for ASN.1 class private +tagClassPrivate = 0xC0 + +#: Identifier for "simple" ASN.1 structure (e.g. scalar) +tagFormatSimple = 0x00 + +#: Identifier for "constructed" ASN.1 structure (e.g. may have inner components) +tagFormatConstructed = 0x20 + +tagCategoryImplicit = 0x01 +tagCategoryExplicit = 0x02 +tagCategoryUntagged = 0x04 + + +class Tag(object): + """Create ASN.1 tag + + Represents ASN.1 tag that can be attached to a ASN.1 type to make + types distinguishable from each other. + + *Tag* objects are immutable and duck-type Python :class:`tuple` objects + holding three integer components of a tag. + + Parameters + ---------- + tagClass: :py:class:`int` + Tag *class* value + + tagFormat: :py:class:`int` + Tag *format* value + + tagId: :py:class:`int` + Tag ID value + """ + def __init__(self, tagClass, tagFormat, tagId): + if tagId < 0: + raise error.PyAsn1Error('Negative tag ID (%s) not allowed' % tagId) + self.__tagClass = tagClass + self.__tagFormat = tagFormat + self.__tagId = tagId + self.__tagClassId = tagClass, tagId + self.__lazyHash = None + + def __str__(self): + return '[%s:%s:%s]' % (self.__tagClass, self.__tagFormat, self.__tagId) + + def __repr__(self): + return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % ( + (self.__class__.__name__, self.__tagClass, self.__tagFormat, self.__tagId) + ) + + def __eq__(self, other): + return self.__tagClassId == other + + def __ne__(self, other): + return self.__tagClassId != other + + def __lt__(self, other): + return self.__tagClassId < other + + def __le__(self, other): + return self.__tagClassId <= other + + def __gt__(self, other): + return self.__tagClassId > other + + def __ge__(self, other): + return self.__tagClassId >= other + + def __hash__(self): + if self.__lazyHash is None: + self.__lazyHash = hash(self.__tagClassId) + return self.__lazyHash + + def __getitem__(self, idx): + if idx == 0: + return self.__tagClass + elif idx == 1: + return self.__tagFormat + elif idx == 2: + return self.__tagId + else: + raise IndexError() + + def __iter__(self): + yield self.__tagClass + yield self.__tagFormat + yield self.__tagId + + def __and__(self, otherTag): + return self.__class__(self.__tagClass & otherTag.tagClass, + self.__tagFormat & otherTag.tagFormat, + self.__tagId & otherTag.tagId) + + def __or__(self, otherTag): + return self.__class__(self.__tagClass | otherTag.tagClass, + self.__tagFormat | otherTag.tagFormat, + self.__tagId | otherTag.tagId) + + @property + def tagClass(self): + """ASN.1 tag class + + Returns + ------- + : :py:class:`int` + Tag 
class + """ + return self.__tagClass + + @property + def tagFormat(self): + """ASN.1 tag format + + Returns + ------- + : :py:class:`int` + Tag format + """ + return self.__tagFormat + + @property + def tagId(self): + """ASN.1 tag ID + + Returns + ------- + : :py:class:`int` + Tag ID + """ + return self.__tagId + + +class TagSet(object): + """Create a collection of ASN.1 tags + + Represents a combination of :class:`~pyasn1.type.tag.Tag` objects + that can be attached to a ASN.1 type to make types distinguishable + from each other. + + *TagSet* objects are immutable and duck-type Python :class:`tuple` objects + holding arbitrary number of :class:`~pyasn1.type.tag.Tag` objects. + + Parameters + ---------- + baseTag: :class:`~pyasn1.type.tag.Tag` + Base *Tag* object. This tag survives IMPLICIT tagging. + + *superTags: :class:`~pyasn1.type.tag.Tag` + Additional *Tag* objects taking part in subtyping. + """ + def __init__(self, baseTag=(), *superTags): + self.__baseTag = baseTag + self.__superTags = superTags + self.__superTagsSignature = tuple( + [(superTag.tagClass, superTag.tagId) for superTag in superTags] + ) + self.__lenOfSuperTags = len(superTags) + self.__lazyHash = None + + def __str__(self): + return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]' + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, '(), ' + ', '.join([repr(x) for x in self.__superTags]) + ) + + def __add__(self, superTag): + return self.__class__(self.__baseTag, *self.__superTags + (superTag,)) + + def __radd__(self, superTag): + return self.__class__(self.__baseTag, *(superTag,) + self.__superTags) + + def __getitem__(self, i): + if i.__class__ is slice: + return self.__class__(self.__baseTag, *self.__superTags[i]) + else: + return self.__superTags[i] + + def __eq__(self, other): + return self.__superTagsSignature == other + + def __ne__(self, other): + return self.__superTagsSignature != other + + def __lt__(self, other): + return self.__superTagsSignature < other + + def __le__(self, other): + return self.__superTagsSignature <= other + + def __gt__(self, other): + return self.__superTagsSignature > other + + def __ge__(self, other): + return self.__superTagsSignature >= other + + def __hash__(self): + if self.__lazyHash is None: + self.__lazyHash = hash(self.__superTags) + return self.__lazyHash + + def __len__(self): + return self.__lenOfSuperTags + + # descriptor protocol + + def __get__(self, instance, owner): + if instance is None: + return self + + # This is a bit of hack: look up instance attribute first, + # then try class attribute if instance attribute with that + # name is not available. + # The rationale is to have `.tagSet` readable-writeable + # as a class attribute and read-only as instance attribute. + try: + return instance._tagSet + + except AttributeError: + return self + + def __set__(self, instance, value): + raise AttributeError('attribute is read-only') + + @property + def baseTag(self): + """Return base ASN.1 tag + + Returns + ------- + : :class:`~pyasn1.type.tag.Tag` + Base tag of this *TagSet* + """ + return self.__baseTag + + @property + def superTags(self): + """Return ASN.1 tags + + Returns + ------- + : :py:class:`tuple` + Tuple of :class:`~pyasn1.type.tag.Tag` objects that this *TagSet* contains + """ + return self.__superTags + + def tagExplicitly(self, superTag): + """Return explicitly tagged *TagSet* + + Create a new *TagSet* representing callee *TagSet* explicitly tagged + with passed tag(s). 
With explicit tagging mode, new tags are appended + to existing tag(s). + + Parameters + ---------- + superTag: :class:`~pyasn1.type.tag.Tag` + *Tag* object to tag this *TagSet* + + Returns + ------- + : :class:`~pyasn1.type.tag.TagSet` + New *TagSet* object + """ + if superTag.tagClass == tagClassUniversal: + raise error.PyAsn1Error('Can\'t tag with UNIVERSAL class tag') + if superTag.tagFormat != tagFormatConstructed: + superTag = Tag(superTag.tagClass, tagFormatConstructed, superTag.tagId) + return self + superTag + + def tagImplicitly(self, superTag): + """Return implicitly tagged *TagSet* + + Create a new *TagSet* representing callee *TagSet* implicitly tagged + with passed tag(s). With implicit tagging mode, new tag(s) replace the + last existing tag. + + Parameters + ---------- + superTag: :class:`~pyasn1.type.tag.Tag` + *Tag* object to tag this *TagSet* + + Returns + ------- + : :class:`~pyasn1.type.tag.TagSet` + New *TagSet* object + """ + if self.__superTags: + superTag = Tag(superTag.tagClass, self.__superTags[-1].tagFormat, superTag.tagId) + return self[:-1] + superTag + + def isSuperTagSetOf(self, tagSet): + """Test type relationship against given *TagSet* + + The callee is considered to be a supertype of given *TagSet* + tag-wise if all tags in *TagSet* are present in the callee and + they are in the same order. + + Parameters + ---------- + tagSet: :class:`~pyasn1.type.tag.TagSet` + *TagSet* object to evaluate against the callee + + Returns + ------- + : :py:class:`bool` + `True` if callee is a supertype of *tagSet* + """ + if len(tagSet) < self.__lenOfSuperTags: + return False + return self.__superTags == tagSet[:self.__lenOfSuperTags] + + # Backward compatibility + + def getBaseTag(self): + return self.__baseTag + +def initTagSet(tag): + return TagSet(tag, tag) diff --git a/src/lib/pyasn1/type/tagmap.py b/src/lib/pyasn1/type/tagmap.py new file mode 100644 index 00000000..8527f33d --- /dev/null +++ b/src/lib/pyasn1/type/tagmap.py @@ -0,0 +1,102 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1 import error + +__all__ = ['TagMap'] + + +class TagMap(object): + """Map *TagSet* objects to ASN.1 types + + Create an object mapping *TagSet* object to ASN.1 type. + + *TagMap* objects are immutable and duck-type read-only Python + :class:`dict` objects holding *TagSet* objects as keys and ASN.1 + type objects as values. + + Parameters + ---------- + presentTypes: :py:class:`dict` + Map of :class:`~pyasn1.type.tag.TagSet` to ASN.1 objects considered + as being unconditionally present in the *TagMap*. + + skipTypes: :py:class:`dict` + A collection of :class:`~pyasn1.type.tag.TagSet` objects considered + as absent in the *TagMap* even when *defaultType* is present. + + defaultType: ASN.1 type object + An ASN.1 type object callee *TagMap* returns for any *TagSet* key not present + in *presentTypes* (unless given key is present in *skipTypes*). 
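The difference between the two tagging modes above is compact enough to show directly. A hedged sketch, using the `initTagSet()` helper defined at the end of tag.py; context tag number 1 is an arbitrary choice.

```python
# Implicit tagging replaces the last tag; explicit tagging appends one.
from pyasn1.type import tag

base = tag.initTagSet(
    tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02))  # INTEGER

implicit = base.tagImplicitly(
    tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))
explicit = base.tagExplicitly(
    tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))

print(len(implicit))  # 1 -- the new tag replaced the existing one
print(len(explicit))  # 2 -- the new tag was appended
```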
+ """ + def __init__(self, presentTypes=None, skipTypes=None, defaultType=None): + self.__presentTypes = presentTypes or {} + self.__skipTypes = skipTypes or {} + self.__defaultType = defaultType + + def __contains__(self, tagSet): + return (tagSet in self.__presentTypes or + self.__defaultType is not None and tagSet not in self.__skipTypes) + + def __getitem__(self, tagSet): + try: + return self.__presentTypes[tagSet] + except KeyError: + if self.__defaultType is None: + raise KeyError() + elif tagSet in self.__skipTypes: + raise error.PyAsn1Error('Key in negative map') + else: + return self.__defaultType + + def __iter__(self): + return iter(self.__presentTypes) + + def __repr__(self): + s = self.__class__.__name__ + '(' + if self.__presentTypes: + s += 'presentTypes=%r, ' % (self.__presentTypes,) + if self.__skipTypes: + s += 'skipTypes=%r, ' % (self.__skipTypes,) + if self.__defaultType is not None: + s += 'defaultType=%r' % (self.__defaultType,) + return s + ')' + + def __str__(self): + s = self.__class__.__name__ + ': ' + if self.__presentTypes: + s += 'presentTypes: %s, ' % ', '.join([x.prettyPrintType() for x in self.__presentTypes.values()]) + if self.__skipTypes: + s += 'skipTypes: %s, ' % ', '.join([x.prettyPrintType() for x in self.__skipTypes.values()]) + if self.__defaultType is not None: + s += 'defaultType: %s, ' % self.__defaultType.prettyPrintType() + return s + + @property + def presentTypes(self): + """Return *TagSet* to ASN.1 type map present in callee *TagMap*""" + return self.__presentTypes + + @property + def skipTypes(self): + """Return *TagSet* collection unconditionally absent in callee *TagMap*""" + return self.__skipTypes + + @property + def defaultType(self): + """Return default ASN.1 type being returned for any missing *TagSet*""" + return self.__defaultType + + # Backward compatibility + + def getPosMap(self): + return self.presentTypes + + def getNegMap(self): + return self.skipTypes + + def getDef(self): + return self.defaultType diff --git a/src/lib/pyasn1/type/univ.py b/src/lib/pyasn1/type/univ.py new file mode 100644 index 00000000..1a146e03 --- /dev/null +++ b/src/lib/pyasn1/type/univ.py @@ -0,0 +1,2806 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +import math +from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap +from pyasn1.codec.ber import eoo +from pyasn1.compat import octets, integer, binary +from pyasn1 import error + +NoValue = base.NoValue +noValue = NoValue() + +__all__ = ['Integer', 'Boolean', 'BitString', 'OctetString', 'Null', + 'ObjectIdentifier', 'Real', 'Enumerated', 'SequenceOfAndSetOfBase', 'SequenceOf', + 'SetOf', 'SequenceAndSetBase', 'Sequence', 'Set', 'Choice', 'Any', + 'NoValue', 'noValue'] + +# "Simple" ASN.1 types (yet incomplete) + +class Integer(base.AbstractSimpleAsn1Item): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`int` objects. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Python integer or string literal or |ASN.1| class instance. 
+ + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Object representing non-default symbolic aliases for numbers + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object + #: representing symbolic aliases for numbers + namedValues = namedval.NamedValues() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None, + namedValues=None): + if namedValues is None: + self.__namedValues = self.namedValues + else: + self.__namedValues = namedValues + base.AbstractSimpleAsn1Item.__init__( + self, value, tagSet, subtypeSpec + ) + + def __repr__(self): + if self.__namedValues is not self.namedValues: + return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues) + else: + return base.AbstractSimpleAsn1Item.__repr__(self) + + def __and__(self, value): + return self.clone(self._value & value) + + def __rand__(self, value): + return self.clone(value & self._value) + + def __or__(self, value): + return self.clone(self._value | value) + + def __ror__(self, value): + return self.clone(value | self._value) + + def __xor__(self, value): + return self.clone(self._value ^ value) + + def __rxor__(self, value): + return self.clone(value ^ self._value) + + def __lshift__(self, value): + return self.clone(self._value << value) + + def __rshift__(self, value): + return self.clone(self._value >> value) + + def __add__(self, value): + return self.clone(self._value + value) + + def __radd__(self, value): + return self.clone(value + self._value) + + def __sub__(self, value): + return self.clone(self._value - value) + + def __rsub__(self, value): + return self.clone(value - self._value) + + def __mul__(self, value): + return self.clone(self._value * value) + + def __rmul__(self, value): + return self.clone(value * self._value) + + def __mod__(self, value): + return self.clone(self._value % value) + + def __rmod__(self, value): + return self.clone(value % self._value) + + def __pow__(self, value, modulo=None): + return self.clone(pow(self._value, value, modulo)) + + def __rpow__(self, value): + return self.clone(pow(value, self._value)) + + def __floordiv__(self, value): + return self.clone(self._value // value) + + def __rfloordiv__(self, value): + return self.clone(value // self._value) + + if sys.version_info[0] <= 2: + def __div__(self, value): + if isinstance(value, float): + return Real(self._value / value) + else: + return self.clone(self._value / value) + + def __rdiv__(self, value): + if isinstance(value, float): + return Real(value / self._value) + else: + return self.clone(value / self._value) + 
else: + def __truediv__(self, value): + return Real(self._value / value) + + def __rtruediv__(self, value): + return Real(value / self._value) + + def __divmod__(self, value): + return self.clone(divmod(self._value, value)) + + def __rdivmod__(self, value): + return self.clone(divmod(value, self._value)) + + __hash__ = base.AbstractSimpleAsn1Item.__hash__ + + def __int__(self): + return int(self._value) + + if sys.version_info[0] <= 2: + def __long__(self): return long(self._value) + + def __float__(self): + return float(self._value) + + def __abs__(self): + return self.clone(abs(self._value)) + + def __index__(self): + return int(self._value) + + def __pos__(self): + return self.clone(+self._value) + + def __neg__(self): + return self.clone(-self._value) + + def __invert__(self): + return self.clone(~self._value) + + def __round__(self, n=0): + r = round(self._value, n) + if n: + return self.clone(r) + else: + return r + + def __floor__(self): + return math.floor(self._value) + + def __ceil__(self): + return math.ceil(self._value) + + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): + return self.clone(math.trunc(self._value)) + + def __lt__(self, value): + return self._value < value + + def __le__(self, value): + return self._value <= value + + def __eq__(self, value): + return self._value == value + + def __ne__(self, value): + return self._value != value + + def __gt__(self, value): + return self._value > value + + def __ge__(self, value): + return self._value >= value + + def prettyIn(self, value): + try: + return int(value) + + except ValueError: + valueOfName = self.__namedValues.getValue(value) + if valueOfName is not None: + return valueOfName + + raise error.PyAsn1Error( + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) + ) + + def prettyOut(self, value): + nameOfValue = self.__namedValues.getName(value) + return nameOfValue is None and str(value) or repr(nameOfValue) + + def getNamedValues(self): + return self.__namedValues + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, namedValues=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. 
+ + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Object representing symbolic aliases for numbers to use instead of inheriting from caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: + value = self._value + else: + isModified = True + if tagSet is None or tagSet is noValue: + tagSet = self._tagSet + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + isModified = True + if namedValues is None or namedValues is noValue: + namedValues = self.__namedValues + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues) + else: + return self + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None, namedValues=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Add given object representing symbolic aliases for numbers + to one of the caller, then use the result as new object's + named numbers. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: + value = self._value + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: + tagSet = self._tagSet.tagImplicitly(implicitTag) + isModified = True + elif explicitTag is not None and explicitTag is not noValue: + tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True + else: + tagSet = self._tagSet + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + if namedValues is None or namedValues is noValue: + namedValues = self.__namedValues + else: + namedValues = namedValues + self.__namedValues + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues) + else: + return self + + +class Boolean(Integer): + __doc__ = Integer.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01), + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = Integer.subtypeSpec + constraint.SingleValueConstraint(0, 1) + + #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object + #: representing symbolic aliases for numbers + namedValues = Integer.namedValues.clone(('False', 0), ('True', 1)) + + # Optimization for faster codec lookup + typeId = Integer.getTypeId() + + +class BitString(base.AbstractSimpleAsn1Item): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type both Python :class:`tuple` (as a tuple + of bits) and :class:`int` objects. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Python integer or string literal representing binary or hexadecimal + number or sequence of integer bits or |ASN.1| object. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Object representing non-default symbolic aliases for numbers + + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
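Boolean's single-value constraint and named values, sketched under the definitions just above; only 0, 1 and their 'False'/'True' aliases should be accepted.

```python
# Boolean accepts 0/1 or the 'False'/'True' aliases; anything else is
# rejected by the SingleValueConstraint(0, 1) subtype spec.
from pyasn1 import error
from pyasn1.type import univ

print(univ.Boolean('True') == univ.Boolean(1))  # True

try:
    univ.Boolean(2)
except error.PyAsn1Error as exc:
    print('rejected:', exc)
```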
+ subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object + #: representing symbolic aliases for numbers + namedValues = namedval.NamedValues() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + defaultBinValue = defaultHexValue = noValue + + if sys.version_info[0] < 3: + SizedIntegerBase = long + else: + SizedIntegerBase = int + + class SizedInteger(SizedIntegerBase): + bitLength = leadingZeroBits = None + + def setBitLength(self, bitLength): + self.bitLength = bitLength + self.leadingZeroBits = max(bitLength - integer.bitLength(self), 0) + return self + + def __len__(self): + if self.bitLength is None: + self.setBitLength(integer.bitLength(self)) + + return self.bitLength + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None, + namedValues=None, binValue=noValue, hexValue=noValue): + if namedValues is None: + self.__namedValues = self.namedValues + else: + self.__namedValues = namedValues + if binValue is not noValue: + value = self.fromBinaryString(binValue) + elif hexValue is not noValue: + value = self.fromHexString(hexValue) + elif value is None or value is noValue: + if self.defaultBinValue is not noValue: + value = self.fromBinaryString(self.defaultBinValue) + elif self.defaultHexValue is not noValue: + value = self.fromHexString(self.defaultHexValue) + base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec) + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, + namedValues=None, binValue=noValue, hexValue=noValue): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Class instance representing BitString type enumerations + + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: + value = self._value + else: + isModified = True + if tagSet is None or tagSet is noValue: + tagSet = self._tagSet + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + isModified = True + if namedValues is None or namedValues is noValue: + namedValues = self.__namedValues + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues, binValue, hexValue) + else: + return self + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None, namedValues=None, binValue=noValue, hexValue=noValue): + """Create a copy of a |ASN.1| type or object. 
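The alternate initializers handled by `__init__` above can be sketched as follows; the bit patterns are illustrative.

```python
# BitString can be built from a binary or hex string initializer and
# duck-types a tuple of bits.
from pyasn1.type import univ

bits = univ.BitString(binValue='10110011')
print(len(bits))  # 8
print(bits[0])    # 1 -- leftmost bit

hexBits = univ.BitString(hexValue='A98A')
print(len(hexBits))  # 16 -- four bits per hex digit
```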
+ + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Add given object representing symbolic aliases for numbers + to one of the caller, then use the result as new object's + named numbers. + + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: + value = self._value + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: + tagSet = self._tagSet.tagImplicitly(implicitTag) + isModified = True + elif explicitTag is not None and explicitTag is not noValue: + tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True + else: + tagSet = self._tagSet + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + if namedValues is None or namedValues is noValue: + namedValues = self.__namedValues + else: + namedValues = namedValues + self.__namedValues + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues, binValue, hexValue) + else: + return self + + def __str__(self): + return self.asBinary() + + def __eq__(self, other): + other = self.prettyIn(other) + return self is other or self._value == other and len(self._value) == len(other) + + def __ne__(self, other): + other = self.prettyIn(other) + return self._value != other or len(self._value) != len(other) + + def __lt__(self, other): + other = self.prettyIn(other) + return len(self._value) < len(other) or len(self._value) == len(other) and self._value < other + + def __le__(self, other): + other = self.prettyIn(other) + return len(self._value) <= len(other) or len(self._value) == len(other) and self._value <= other + + def __gt__(self, other): + other = self.prettyIn(other) + return len(self._value) > len(other) or len(self._value) == len(other) and self._value > other + + def __ge__(self, other): + other = self.prettyIn(other) + return len(self._value) >= len(other) or len(self._value) == len(other) and self._value >= other + + # Immutable sequence object protocol + + def __len__(self): + return len(self._value) + + def __getitem__(self, i): + if i.__class__ is slice: + return self.clone([self[x] for x in range(*i.indices(len(self)))]) + else: + length = len(self._value) - 1 + if i > length or i < 0: + raise IndexError('bit index out of 
range') + return (self._value >> (length - i)) & 1 + + def __iter__(self): + length = len(self._value) + while length: + length -= 1 + yield (self._value >> length) & 1 + + def __reversed__(self): + return reversed(tuple(self)) + + # arithmetic operators + + def __add__(self, value): + value = self.prettyIn(value) + return self.clone(self.SizedInteger(self._value << len(value) | value).setBitLength(len(self._value) + len(value))) + + def __radd__(self, value): + value = self.prettyIn(value) + return self.clone(self.SizedInteger(value << len(self._value) | self._value).setBitLength(len(self._value) + len(value))) + + def __mul__(self, value): + bitString = self._value + while value > 1: + bitString <<= len(self._value) + bitString |= self._value + value -= 1 + return self.clone(bitString) + + def __rmul__(self, value): + return self * value + + def __lshift__(self, count): + return self.clone(self.SizedInteger(self._value << count).setBitLength(len(self._value) + count)) + + def __rshift__(self, count): + return self.clone(self.SizedInteger(self._value >> count).setBitLength(max(0, len(self._value) - count))) + + def __int__(self): + return self._value + + def __float__(self): + return float(self._value) + + if sys.version_info[0] < 3: + def __long__(self): + return self._value + + def asNumbers(self): + """Get |ASN.1| value as a sequence of 8-bit integers. + + If |ASN.1| object length is not a multiple of 8, result + will be left-padded with zeros. + """ + return tuple(octets.octs2ints(self.asOctets())) + + def asOctets(self): + """Get |ASN.1| value as a sequence of octets. + + If |ASN.1| object length is not a multiple of 8, result + will be left-padded with zeros. + """ + return integer.to_bytes(self._value, length=len(self)) + + def asInteger(self): + """Get |ASN.1| value as a single integer value. + """ + return self._value + + def asBinary(self): + """Get |ASN.1| value as a text string of bits. 
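Taken together, the conversion helpers expose the same bits in different shapes. A hedged sketch; an 8-bit value is chosen so asOctets() needs no left-padding.

```python
# asBinary()/asInteger()/asNumbers() round-trip one octet of bits.
from pyasn1.type import univ

bits = univ.BitString(binValue='10110011')
print(bits.asBinary())   # '10110011'
print(bits.asInteger())  # 179
print(bits.asNumbers())  # (179,)
```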
+ """ + binString = binary.bin(self._value)[2:] + return '0'*(len(self._value) - len(binString)) + binString + + @classmethod + def fromHexString(cls, value): + try: + return cls.SizedInteger(value, 16).setBitLength(len(value) * 4) + + except ValueError: + raise error.PyAsn1Error('%s.fromHexString() error: %s' % (cls.__name__, sys.exc_info()[1])) + + @classmethod + def fromBinaryString(cls, value): + try: + return cls.SizedInteger(value or '0', 2).setBitLength(len(value)) + + except ValueError: + raise error.PyAsn1Error('%s.fromBinaryString() error: %s' % (cls.__name__, sys.exc_info()[1])) + + @classmethod + def fromOctetString(cls, value, padding=0): + return cls(cls.SizedInteger(integer.from_bytes(value) >> padding).setBitLength(len(value) * 8 - padding)) + + def prettyIn(self, value): + if octets.isStringType(value): + if not value: + return self.SizedInteger(0).setBitLength(0) + + elif value[0] == '\'': # "'1011'B" -- ASN.1 schema representation (deprecated) + if value[-2:] == '\'B': + return self.fromBinaryString(value[1:-2]) + elif value[-2:] == '\'H': + return self.fromHexString(value[1:-2]) + else: + raise error.PyAsn1Error( + 'Bad BIT STRING value notation %s' % (value,) + ) + + elif self.__namedValues and not value.isdigit(): # named bits like 'Urgent, Active' + bitPositions = self.__namedValues.getValues(*[x.strip() for x in value.split(',')]) + + rightmostPosition = max(bitPositions) + + number = 0 + for bitPosition in bitPositions: + number |= 1 << (rightmostPosition - bitPosition) + + return self.SizedInteger(number).setBitLength(rightmostPosition + 1) + + elif value.startswith('0x'): + return self.fromHexString(value[2:]) + + elif value.startswith('0b'): + return self.fromBinaryString(value[2:]) + + else: # assume plain binary string like '1011' + return self.fromBinaryString(value) + + elif isinstance(value, (tuple, list)): + return self.fromBinaryString(''.join([b and '1' or '0' for b in value])) + + elif isinstance(value, (self.SizedInteger, BitString)): + return self.SizedInteger(value).setBitLength(len(value)) + + elif isinstance(value, intTypes): + return self.SizedInteger(value) + + else: + raise error.PyAsn1Error( + 'Bad BitString initializer type \'%s\'' % (value,) + ) + + def prettyOut(self, value): + return '\'%s\'' % str(self) + + +try: + # noinspection PyStatementEffect + all + +except NameError: # Python 2.4 + # noinspection PyShadowingBuiltins + def all(iterable): + for element in iterable: + if not element: + return False + return True + + +class OctetString(base.AbstractSimpleAsn1Item): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python 2 :class:`str` or Python 3 :class:`bytes`. + When used in Unicode context, |ASN.1| type assumes "|encoding|" serialization. + + Parameters + ---------- + value : :class:`str`, :class:`bytes` or |ASN.1| object + string (Python 2) or bytes (Python 3), alternatively unicode object + (Python 2) or string (Python 3) representing character string to be + serialized into octets (note `encoding` parameter) or |ASN.1| object. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) or + :class:`str` (Python 3) the payload when |ASN.1| object is used + in text string context. 
+ + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + defaultBinValue = defaultHexValue = noValue + encoding = 'iso-8859-1' + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None, + encoding=None, binValue=noValue, hexValue=noValue): + if encoding is None: + self._encoding = self.encoding + else: + self._encoding = encoding + if binValue is not noValue: + value = self.fromBinaryString(binValue) + elif hexValue is not noValue: + value = self.fromHexString(hexValue) + elif value is None or value is noValue: + if self.defaultBinValue is not noValue: + value = self.fromBinaryString(self.defaultBinValue) + elif self.defaultHexValue is not noValue: + value = self.fromHexString(self.defaultHexValue) + self.__asNumbersCache = None + base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec) + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, + encoding=None, binValue=noValue, hexValue=noValue): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`str`, :class:`bytes` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) + or :class:`str` (Python 3) the payload when |ASN.1| + object is used in string context. + + binValue: :py:class:`str` + Binary string initializer. Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer. Example: 'DEADBEEF'. 
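OctetString's text/bytes duality, sketched assuming Python 3 semantics; the payload and codec are illustrative.

```python
# On Python 3, str payloads are encoded with the configured codec and
# the raw bytes are recovered via asOctets()/asNumbers().
from pyasn1.type import univ

s = univ.OctetString('Weather', encoding='us-ascii')
print(s.asOctets())  # b'Weather'

raw = univ.OctetString(hexValue='DEADBEEF')
print(raw.asNumbers())  # (222, 173, 190, 239)
```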
+ + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: + value = self._value + else: + isModified = True + if tagSet is None or tagSet is noValue: + tagSet = self._tagSet + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + isModified = True + if encoding is None or encoding is noValue: + encoding = self._encoding + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, encoding, binValue, hexValue) + else: + return self + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None, encoding=None, binValue=noValue, + hexValue=noValue): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`str`, :class:`bytes` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to |ASN.1| object tag set + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to |ASN.1| object tag set + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) + or :class:`str` (Python 3) the payload when *OctetString* + object is used in string context. + + binValue: :py:class:`str` + Binary string initializer. Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer. Example: 'DEADBEEF'. 
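The usual consumer of *subtype()* is a tagged-field declaration. A hedged sketch of that idiom; context tag number 0 is arbitrary.

```python
# Deriving a context-tagged OctetString schema object, as a SEQUENCE
# component declaration typically would.
from pyasn1.type import tag, univ

taggedOctets = univ.OctetString().subtype(
    implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))

print(taggedOctets.tagSet)  # the context tag replaces [UNIVERSAL 4]
```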
+ + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: + value = self._value + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: + tagSet = self._tagSet.tagImplicitly(implicitTag) + isModified = True + elif explicitTag is not None and explicitTag is not noValue: + tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True + else: + tagSet = self._tagSet + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + if encoding is None or encoding is noValue: + encoding = self._encoding + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, encoding, binValue, hexValue) + else: + return self + + if sys.version_info[0] <= 2: + def prettyIn(self, value): + if isinstance(value, str): + return value + elif isinstance(value, unicode): + try: + return value.encode(self._encoding) + except (LookupError, UnicodeEncodeError): + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, (tuple, list)): + try: + return ''.join([chr(x) for x in value]) + except ValueError: + raise error.PyAsn1Error( + 'Bad %s initializer \'%s\'' % (self.__class__.__name__, value) + ) + else: + return str(value) + + def __str__(self): + return str(self._value) + + def __unicode__(self): + try: + return self._value.decode(self._encoding) + + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec' % (self._value, self._encoding) + ) + + def asOctets(self): + return str(self._value) + + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple([ord(x) for x in self._value]) + return self.__asNumbersCache + + else: + def prettyIn(self, value): + if isinstance(value, bytes): + return value + elif isinstance(value, str): + try: + return value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, OctetString): # a shortcut, bytes() would work the same way + return value.asOctets() + elif isinstance(value, base.AbstractSimpleAsn1Item): # this mostly targets Integer objects + return self.prettyIn(str(value)) + elif isinstance(value, (tuple, list)): + return self.prettyIn(bytes(value)) + else: + return bytes(value) + + def __str__(self): + try: + return self._value.decode(self._encoding) + + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec at \'%s\'' % (self._value, self._encoding, self.__class__.__name__) + ) + + def __bytes__(self): + return bytes(self._value) + + def asOctets(self): + return bytes(self._value) + + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple(self._value) + return self.__asNumbersCache + + def prettyOut(self, value): + if sys.version_info[0] <= 2: + numbers = tuple((ord(x) for x in value)) + else: + numbers = tuple(value) + for x in numbers: + if x < 32 or x > 126: + return '0x' + ''.join(('%.2x' % x for x in numbers)) + else: + return octets.octs2str(value) + + @staticmethod + def fromBinaryString(value): + bitNo = 8 + byte = 0 + r = [] + for v in value: + if bitNo: + bitNo -= 1 + else: + bitNo = 7 + r.append(byte) + byte = 0 
+ if v in ('0', '1'): + v = int(v) + else: + raise error.PyAsn1Error( + 'Non-binary OCTET STRING initializer %s' % (v,) + ) + byte |= v << bitNo + + r.append(byte) + + return octets.ints2octs(r) + + @staticmethod + def fromHexString(value): + r = [] + p = [] + for v in value: + if p: + r.append(int(p + v, 16)) + p = None + else: + p = v + if p: + r.append(int(p + '0', 16)) + + return octets.ints2octs(r) + + def __repr__(self): + r = [] + doHex = False + if self._value is not self.defaultValue: + for x in self.asNumbers(): + if x < 32 or x > 126: + doHex = True + break + if not doHex: + r.append('%r' % (self._value,)) + if self._tagSet is not self.__class__.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + if self.encoding is not self._encoding: + r.append('encoding=%r' % (self._encoding,)) + if doHex: + r.append('hexValue=%r' % ''.join(['%.2x' % x for x in self.asNumbers()])) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + + # Immutable sequence object protocol + + def __len__(self): + if self._len is None: + self._len = len(self._value) + return self._len + + def __getitem__(self, i): + if i.__class__ is slice: + return self.clone(self._value[i]) + else: + return self._value[i] + + def __iter__(self): + return iter(self._value) + + def __contains__(self, value): + return value in self._value + + def __add__(self, value): + return self.clone(self._value + self.prettyIn(value)) + + def __radd__(self, value): + return self.clone(self.prettyIn(value) + self._value) + + def __mul__(self, value): + return self.clone(self._value * value) + + def __rmul__(self, value): + return self * value + + def __int__(self): + return int(self._value) + + def __float__(self): + return float(self._value) + + def __reversed__(self): + return reversed(self._value) + + +class Null(OctetString): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`str` objects (always empty). + + Parameters + ---------- + value : :class:`str` or :py:class:`~pyasn1.type.univ.Null` object + Python empty string literal or *Null* class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + defaultValue = ''.encode() # This is tightly constrained + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05) + ) + subtypeSpec = OctetString.subtypeSpec + constraint.SingleValueConstraint(octets.str2octs('')) + + # Optimization for faster codec lookup + typeId = OctetString.getTypeId() + + def clone(self, value=noValue, tagSet=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. 
+ + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : :py:class:`~pyasn1.type.univ.Null` + new instance of NULL type/value + """ + return OctetString.clone(self, value, tagSet) + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + Returns + ------- + : :py:class:`~pyasn1.type.univ.Null` + new instance of NULL type/value + """ + return OctetString.subtype(self, value, implicitTag, explicitTag) + + +if sys.version_info[0] <= 2: + intTypes = (int, long) +else: + intTypes = (int,) + +numericTypes = intTypes + (float,) + + +class ObjectIdentifier(base.AbstractSimpleAsn1Item): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`tuple` objects (tuple of non-negative integers). + + Parameters + ---------- + value: :class:`tuple`, :class:`str` or |ASN.1| object + Python sequence of :class:`int` or string literal or |ASN.1| object. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
+ subtypeSpec = constraint.ConstraintsIntersection() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + def __add__(self, other): + return self.clone(self._value + other) + + def __radd__(self, other): + return self.clone(other + self._value) + + def asTuple(self): + return self._value + + # Sequence object protocol + + def __len__(self): + if self._len is None: + self._len = len(self._value) + return self._len + + def __getitem__(self, i): + if i.__class__ is slice: + return self.clone(self._value[i]) + else: + return self._value[i] + + def __iter__(self): + return iter(self._value) + + def __contains__(self, value): + return value in self._value + + def __str__(self): + return self.prettyPrint() + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.prettyPrint()) + + def index(self, suboid): + return self._value.index(suboid) + + def isPrefixOf(self, other): + """Indicate if this |ASN.1| object is a prefix of other |ASN.1| object. + + Parameters + ---------- + other: |ASN.1| object + |ASN.1| object + + Returns + ------- + : :class:`bool` + :class:`True` if this |ASN.1| object is a parent (e.g. prefix) of the other |ASN.1| object + or :class:`False` otherwise. + """ + l = len(self) + if l <= len(other): + if self._value[:l] == other[:l]: + return True + return False + + def prettyIn(self, value): + if isinstance(value, ObjectIdentifier): + return tuple(value) + elif octets.isStringType(value): + if '-' in value: + raise error.PyAsn1Error( + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) + ) + try: + return tuple([int(subOid) for subOid in value.split('.') if subOid]) + except ValueError: + raise error.PyAsn1Error( + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) + ) + + try: + tupleOfInts = tuple([int(subOid) for subOid in value if subOid >= 0]) + + except (ValueError, TypeError): + raise error.PyAsn1Error( + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) + ) + + if len(tupleOfInts) == len(value): + return tupleOfInts + + raise error.PyAsn1Error('Malformed Object ID %s at %s' % (value, self.__class__.__name__)) + + def prettyOut(self, value): + return '.'.join([str(x) for x in value]) + + +class Real(base.AbstractSimpleAsn1Item): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`float` objects. + Additionally, |ASN.1| objects behave like a :class:`tuple` in which case its + elements are mantissa, base and exponent. + + Parameters + ---------- + value: :class:`tuple`, :class:`float` or |ASN.1| object + Python sequence of :class:`int` (representing mantissa, base and + exponent) or float instance or *Real* class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. 
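A hedged sketch of the ObjectIdentifier behaviour defined above; the OID values are illustrative.

```python
# ObjectIdentifier duck-types a tuple of sub-IDs; concatenation clones,
# and isPrefixOf() implements the parent/child test documented above.
from pyasn1.type import univ

internet = univ.ObjectIdentifier('1.3.6.1')
mib2 = internet + (2, 1)

print(str(mib2))                  # '1.3.6.1.2.1'
print(internet.isPrefixOf(mib2))  # True
```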
+ + """ + binEncBase = None # binEncBase = 16 is recommended for large numbers + + try: + _plusInf = float('inf') + _minusInf = float('-inf') + _inf = (_plusInf, _minusInf) + except ValueError: + # Infinity support is platform and Python dependent + _plusInf = _minusInf = None + _inf = () + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`tuple`, :class:`float` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + return base.AbstractSimpleAsn1Item.clone(self, value, tagSet, subtypeSpec) + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`tuple`, :class:`float` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). 
+ + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + return base.AbstractSimpleAsn1Item.subtype(self, value, implicitTag, explicitTag) + + @staticmethod + def __normalizeBase10(value): + m, b, e = value + while m and m % 10 == 0: + m /= 10 + e += 1 + return m, b, e + + def prettyIn(self, value): + if isinstance(value, tuple) and len(value) == 3: + if not isinstance(value[0], numericTypes) or \ + not isinstance(value[1], intTypes) or \ + not isinstance(value[2], intTypes): + raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,)) + if isinstance(value[0], float) and \ + self._inf and value[0] in self._inf: + return value[0] + if value[1] not in (2, 10): + raise error.PyAsn1Error( + 'Prohibited base for Real value: %s' % (value[1],) + ) + if value[1] == 10: + value = self.__normalizeBase10(value) + return value + elif isinstance(value, intTypes): + return self.__normalizeBase10((value, 10, 0)) + elif isinstance(value, float) or octets.isStringType(value): + if octets.isStringType(value): + try: + value = float(value) + except ValueError: + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) + if self._inf and value in self._inf: + return value + else: + e = 0 + while int(value) != value: + value *= 10 + e -= 1 + return self.__normalizeBase10((int(value), 10, e)) + elif isinstance(value, Real): + return tuple(value) + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) + + def prettyOut(self, value): + if value in self._inf: + return '\'%s\'' % value + else: + return str(value) + + def prettyPrint(self, scope=0): + if self.isInfinity(): + return self.prettyOut(self._value) + else: + return str(float(self)) + + def isPlusInfinity(self): + """Indicate PLUS-INFINITY object value + + Returns + ------- + : :class:`bool` + :class:`True` if calling object represents plus infinity + or :class:`False` otherwise. + + """ + return self._value == self._plusInf + + def isMinusInfinity(self): + """Indicate MINUS-INFINITY object value + + Returns + ------- + : :class:`bool` + :class:`True` if calling object represents minus infinity + or :class:`False` otherwise. 
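+
+        A sketch (illustrative; IEEE infinities assumed available on the
+        platform)::
+
+            Real('-inf').isMinusInfinity()    # True where supported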
+ """ + return self._value == self._minusInf + + def isInfinity(self): + return self._value in self._inf + + def __str__(self): + return str(float(self)) + + def __add__(self, value): + return self.clone(float(self) + value) + + def __radd__(self, value): + return self + value + + def __mul__(self, value): + return self.clone(float(self) * value) + + def __rmul__(self, value): + return self * value + + def __sub__(self, value): + return self.clone(float(self) - value) + + def __rsub__(self, value): + return self.clone(value - float(self)) + + def __mod__(self, value): + return self.clone(float(self) % value) + + def __rmod__(self, value): + return self.clone(value % float(self)) + + def __pow__(self, value, modulo=None): + return self.clone(pow(float(self), value, modulo)) + + def __rpow__(self, value): + return self.clone(pow(value, float(self))) + + if sys.version_info[0] <= 2: + def __div__(self, value): + return self.clone(float(self) / value) + + def __rdiv__(self, value): + return self.clone(value / float(self)) + else: + def __truediv__(self, value): + return self.clone(float(self) / value) + + def __rtruediv__(self, value): + return self.clone(value / float(self)) + + def __divmod__(self, value): + return self.clone(float(self) // value) + + def __rdivmod__(self, value): + return self.clone(value // float(self)) + + def __int__(self): + return int(float(self)) + + if sys.version_info[0] <= 2: + def __long__(self): return long(float(self)) + + def __float__(self): + if self._value in self._inf: + return self._value + else: + return float( + self._value[0] * pow(self._value[1], self._value[2]) + ) + + def __abs__(self): + return self.clone(abs(float(self))) + + def __pos__(self): + return self.clone(+float(self)) + + def __neg__(self): + return self.clone(-float(self)) + + def __round__(self, n=0): + r = round(float(self), n) + if n: + return self.clone(r) + else: + return r + + def __floor__(self): + return self.clone(math.floor(float(self))) + + def __ceil__(self): + return self.clone(math.ceil(float(self))) + + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(float(self))) + + def __lt__(self, value): + return float(self) < value + + def __le__(self, value): + return float(self) <= value + + def __eq__(self, value): + return float(self) == value + + def __ne__(self, value): + return float(self) != value + + def __gt__(self, value): + return float(self) > value + + def __ge__(self, value): + return float(self) >= value + + if sys.version_info[0] <= 2: + def __nonzero__(self): + return bool(float(self)) + else: + def __bool__(self): + return bool(float(self)) + + __hash__ = base.AbstractSimpleAsn1Item.__hash__ + + def __getitem__(self, idx): + if self._value in self._inf: + raise error.PyAsn1Error('Invalid infinite value operation') + else: + return self._value[idx] + + +class Enumerated(Integer): + __doc__ = Integer.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
+    subtypeSpec = constraint.ConstraintsIntersection()
+
+    # Optimization for faster codec lookup
+    typeId = Integer.getTypeId()
+
+    #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object
+    #: representing symbolic aliases for numbers
+    namedValues = namedval.NamedValues()
+
+
+# "Structured" ASN.1 types
+
+class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item):
+    """Create |ASN.1| type.
+
+    |ASN.1| objects are mutable and duck-type Python :class:`list` objects.
+
+    Parameters
+    ----------
+    componentType : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+        A pyasn1 object representing ASN.1 type allowed within |ASN.1| type
+
+    tagSet: :py:class:`~pyasn1.type.tag.TagSet`
+        Object representing non-default ASN.1 tag(s)
+
+    subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+        Object representing non-default ASN.1 subtype constraint(s)
+
+    sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
+        Object representing collection size constraint
+    """
+
+    # Python list protocol
+
+    def clear(self):
+        self._componentValues = []
+
+    def append(self, value):
+        self[len(self)] = value
+
+    def count(self, value):
+        return self._componentValues.count(value)
+
+    def extend(self, values):
+        for value in values:
+            self.append(value)
+
+    def index(self, value, start=0, stop=None):
+        if stop is None:
+            stop = len(self)
+        return self._componentValues.index(value, start, stop)
+
+    def reverse(self):
+        self._componentValues.reverse()
+
+    def sort(self, key=None, reverse=False):
+        self._componentValues.sort(key=key, reverse=reverse)
+
+    def __iter__(self):
+        return iter(self._componentValues)
+
+    def _cloneComponentValues(self, myClone, cloneValueFlag):
+        for idx, componentValue in enumerate(self._componentValues):
+            if componentValue is not None:
+                if isinstance(componentValue, base.AbstractConstructedAsn1Item):
+                    myClone.setComponentByPosition(
+                        idx, componentValue.clone(cloneValueFlag=cloneValueFlag)
+                    )
+                else:
+                    myClone.setComponentByPosition(idx, componentValue.clone())
+
+    def getComponentByPosition(self, idx):
+        """Return |ASN.1| type component value by position.
+
+        Equivalent to Python sequence subscription operation (e.g. `[]`).
+
+        Parameters
+        ----------
+        idx : :class:`int`
+            Component index (zero-based). Must either refer to an existing
+            component or to N+1 component (if *componentType* is set). In the
+            latter case a new component type gets instantiated and appended to
+            the |ASN.1| sequence.
+
+        Returns
+        -------
+        : :py:class:`~pyasn1.type.base.PyAsn1Item`
+            a pyasn1 object
+        """
+        try:
+            return self._componentValues[idx]
+        except IndexError:
+            self.setComponentByPosition(idx)
+            return self._componentValues[idx]
+
+    def setComponentByPosition(self, idx, value=noValue,
+                               verifyConstraints=True,
+                               matchTags=True,
+                               matchConstraints=True):
+        """Assign |ASN.1| type component by position.
+
+        Equivalent to Python sequence item assignment operation (e.g. `[]`)
+        or list.append() (when idx == len(self)).
+
+        Parameters
+        ----------
+        idx: :class:`int`
+            Component index (zero-based). Must either refer to existing
+            component or to N+1 component. In the latter case a new component
+            type gets instantiated (if *componentType* is set, or given ASN.1
+            object is taken otherwise) and appended to the |ASN.1| sequence.
+
+        value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative
+            A Python value to initialize |ASN.1| component with (if *componentType* is set)
+            or ASN.1 value object to assign to |ASN.1| component.
+
+        verifyConstraints: :class:`bool`
+            If `False`, skip constraints validation
+
+        matchTags: :class:`bool`
+            If `False`, skip component tags matching
+
+        matchConstraints: :class:`bool`
+            If `False`, skip component constraints matching
+
+        Returns
+        -------
+        self
+
+        Raises
+        ------
+        IndexError:
+            When idx > len(self)
+        """
+        componentType = self._componentType
+
+        try:
+            currentValue = self._componentValues[idx]
+        except IndexError:
+            currentValue = None
+
+            if len(self._componentValues) < idx:
+                raise error.PyAsn1Error('Component index out of range')
+
+        if value is None or value is noValue:
+            if componentType is not None:
+                value = componentType.clone()
+            elif currentValue is None:
+                raise error.PyAsn1Error('Component type not defined')
+        elif not isinstance(value, base.Asn1Item):
+            if componentType is not None and isinstance(componentType, base.AbstractSimpleAsn1Item):
+                value = componentType.clone(value=value)
+            elif currentValue is not None and isinstance(currentValue, base.AbstractSimpleAsn1Item):
+                value = currentValue.clone(value=value)
+            else:
+                raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__)
+        elif componentType is not None:
+            if self.strictConstraints:
+                if not componentType.isSameTypeWith(value, matchTags, matchConstraints):
+                    raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
+            else:
+                if not componentType.isSuperTypeOf(value, matchTags, matchConstraints):
+                    raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType))
+
+        if verifyConstraints and value.isValue:
+            try:
+                self._subtypeSpec(value, idx)
+
+            except error.PyAsn1Error:
+                exType, exValue, exTb = sys.exc_info()
+                raise exType('%s at %s' % (exValue, self.__class__.__name__))
+
+        if currentValue is None:
+            self._componentValues.append(value)
+        else:
+            self._componentValues[idx] = value
+
+        return self
+
+    @property
+    def componentTagMap(self):
+        if self._componentType is not None:
+            return self._componentType.tagMap
+
+    def prettyPrint(self, scope=0):
+        scope += 1
+        representation = self.__class__.__name__ + ':\n'
+        for idx in range(len(self._componentValues)):
+            representation += ' ' * scope
+            if self._componentValues[idx] is None:
+                representation += '<empty>'
+            else:
+                representation += self._componentValues[idx].prettyPrint(scope)
+        return representation
+
+    def prettyPrintType(self, scope=0):
+        scope += 1
+        representation = '%s -> %s {\n' % (self.tagSet, self.__class__.__name__)
+        if self._componentType is not None:
+            representation += ' ' * scope
+            representation += self._componentType.prettyPrintType(scope)
+        return representation + '\n' + ' ' * (scope - 1) + '}'
+
+    @property
+    def isValue(self):
+        """Indicate if |ASN.1| object components represent ASN.1 type or ASN.1 value.
+
+        The PyASN1 type objects can only participate in types comparison
+        and serve as a blueprint for serialization codecs to resolve
+        ambiguous types.
+
+        The PyASN1 value objects can additionally participate in most
+        of built-in Python operations.
+
+        Returns
+        -------
+        : :class:`bool`
+            :class:`True` if all |ASN.1| components represent value and type,
+            :class:`False` if at least one |ASN.1| component represents just ASN.1 type.
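+
+        A sketch (illustrative; assumes *Integer* from this module)::
+
+            seq = SequenceOf(componentType=Integer())
+            seq.isValue    # False -- no components assigned yet
+            seq.append(1)
+            seq.isValue    # True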
+ """ + if not self._componentValues: + return False + + for componentValue in self._componentValues: + if not componentValue.isValue: + return False + + return True + + +class SequenceOf(SequenceOfAndSetOfBase): + __doc__ = SequenceOfAndSetOfBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) + ) + + #: Default :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = None + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing size constraint on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = SequenceOfAndSetOfBase.getTypeId() + + +class SetOf(SequenceOfAndSetOfBase): + __doc__ = SequenceOfAndSetOfBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) + ) + + #: Default :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = None + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing size constraint on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = SequenceOfAndSetOfBase.getTypeId() + + +class SequenceAndSetBase(base.AbstractConstructedAsn1Item): + """Create |ASN.1| type. + + |ASN.1| objects are mutable and duck-type Python :class:`dict` objects. 
+ + Parameters + ---------- + componentType: :py:class:`~pyasn1.type.namedtype.NamedType` + Object holding named ASN.1 types allowed within this collection + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing collection size constraint + """ + #: Default :py:class:`~pyasn1.type.namedtype.NamedTypes` + #: object representing named ASN.1 types allowed within |ASN.1| type + componentType = namedtype.NamedTypes() + + def __init__(self, componentType=None, tagSet=None, + subtypeSpec=None, sizeSpec=None): + if componentType is None: + componentType = self.componentType + base.AbstractConstructedAsn1Item.__init__( + self, componentType, tagSet, subtypeSpec, sizeSpec + ) + self._componentTypeLen = len(self._componentType) + + def __getitem__(self, idx): + if octets.isStringType(idx): + return self.getComponentByName(idx) + else: + return base.AbstractConstructedAsn1Item.__getitem__(self, idx) + + def __setitem__(self, idx, value): + if octets.isStringType(idx): + self.setComponentByName(idx, value) + else: + base.AbstractConstructedAsn1Item.__setitem__(self, idx, value) + + def __contains__(self, key): + return key in self._componentType + + def __iter__(self): + return iter(self._componentType) + + # Python dict protocol + + def values(self): + for idx in range(self._componentTypeLen): + yield self[idx] + + def keys(self): + return iter(self._componentType) + + def items(self): + for idx in range(self._componentTypeLen): + yield self._componentType[idx].getName(), self[idx] + + def update(self, *iterValue, **mappingValue): + for k, v in iterValue: + self[k] = v + for k in mappingValue: + self[k] = mappingValue[k] + + def clear(self): + self._componentValues = [] + + def _cloneComponentValues(self, myClone, cloneValueFlag): + for idx, componentValue in enumerate(self._componentValues): + if componentValue is not None: + if isinstance(componentValue, base.AbstractConstructedAsn1Item): + myClone.setComponentByPosition( + idx, componentValue.clone(cloneValueFlag=cloneValueFlag) + ) + else: + myClone.setComponentByPosition(idx, componentValue.clone()) + + def getComponentByName(self, name): + """Returns |ASN.1| type component by name. + + Equivalent to Python :class:`dict` subscription operation (e.g. `[]`). + + Parameters + ---------- + name : :class:`str` + |ASN.1| type component name + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + Instantiate |ASN.1| component type or return existing component value + """ + return self.getComponentByPosition( + self._componentType.getPositionByName(name) + ) + + def setComponentByName(self, name, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True): + """Assign |ASN.1| type component by name. + + Equivalent to Python :class:`dict` item assignment operation (e.g. `[]`). + + Parameters + ---------- + name: :class:`str` + |ASN.1| type component name + + value : :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. 
+ + verifyConstraints: :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + Returns + ------- + self + """ + return self.setComponentByPosition( + self._componentType.getPositionByName(name), value, verifyConstraints, matchTags, matchConstraints + ) + + def getComponentByPosition(self, idx): + """Returns |ASN.1| type component by index. + + Equivalent to Python sequence subscription operation (e.g. `[]`). + + Parameters + ---------- + idx : :class:`int` + Component index (zero-based). Must either refer to an existing + component or (if *componentType* is set) new ASN.1 type object gets + instantiated. + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + a PyASN1 object + """ + try: + componentValue = self._componentValues[idx] + except IndexError: + componentValue = None + + if componentValue is None: + self.setComponentByPosition(idx) + + return self._componentValues[idx] + + def setComponentByPosition(self, idx, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True): + """Assign |ASN.1| type component by position. + + Equivalent to Python sequence item assignment operation (e.g. `[]`). + + Parameters + ---------- + idx : :class:`int` + Component index (zero-based). Must either refer to existing + component (if *componentType* is set) or to N+1 component + otherwise. In the latter case a new component of given ASN.1 + type gets instantiated and appended to |ASN.1| sequence. + + value : :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. 
+ + verifyConstraints : :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + Returns + ------- + self + """ + componentType = self._componentType + componentTypeLen = self._componentTypeLen + + try: + currentValue = self._componentValues[idx] + except IndexError: + currentValue = None + if componentTypeLen: + if componentTypeLen < idx: + raise IndexError('component index out of range') + self._componentValues = [None] * componentTypeLen + + if value is None or value is noValue: + if componentTypeLen: + value = componentType.getTypeByPosition(idx).clone() + elif currentValue is None: + raise error.PyAsn1Error('Component type not defined') + elif not isinstance(value, base.Asn1Item): + if componentTypeLen: + subComponentType = componentType.getTypeByPosition(idx) + if isinstance(subComponentType, base.AbstractSimpleAsn1Item): + value = subComponentType.clone(value=value) + else: + raise error.PyAsn1Error('%s can cast only scalar values' % componentType.__class__.__name__) + elif currentValue is not None and isinstance(currentValue, base.AbstractSimpleAsn1Item): + value = currentValue.clone(value=value) + else: + raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__) + elif (matchTags or matchConstraints) and componentTypeLen: + subComponentType = componentType.getTypeByPosition(idx) + if subComponentType is not None: + if self.strictConstraints: + if not subComponentType.isSameTypeWith(value, matchTags, matchConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType)) + else: + if not subComponentType.isSuperTypeOf(value, matchTags, matchConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType)) + + if verifyConstraints and value.isValue: + try: + self._subtypeSpec(value, idx) + + except error.PyAsn1Error: + exType, exValue, exTb = sys.exc_info() + raise exType('%s at %s' % (exValue, self.__class__.__name__)) + + if componentTypeLen: + self._componentValues[idx] = value + elif len(self._componentValues) == idx: + self._componentValues.append(value) + else: + raise error.PyAsn1Error('Component index out of range') + + return self + + def getNameByPosition(self, idx): + if self._componentTypeLen: + return self._componentType.getNameByPosition(idx) + + def getComponentType(self): + if self._componentTypeLen: + return self._componentType + + @property + def isValue(self): + """Indicate if |ASN.1| object components represent ASN.1 type or ASN.1 value. + + The PyASN1 type objects can only participate in types comparison + and serve as a blueprint for serialization codecs to resolve + ambiguous types. + + The PyASN1 value objects can additionally participate in most + of built-in Python operations. + + Returns + ------- + : :class:`bool` + :class:`True` if all |ASN.1| components represent value and type, + :class:`False` if at least one |ASN.1| component represents just ASN.1 type. 
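+
+        A sketch (illustrative; assumes *namedtype* from pyasn1.type and
+        *Integer* from this module)::
+
+            s = Sequence(componentType=namedtype.NamedTypes(
+                namedtype.NamedType('x', Integer())))
+            s.isValue    # False until the mandatory 'x' is assigned
+            s['x'] = 1
+            s.isValue    # True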
+ """ + componentType = self._componentType + + if componentType: + for idx, subComponentType in enumerate(componentType.namedTypes): + if subComponentType.isDefaulted or subComponentType.isOptional: + continue + if not self._componentValues or self._componentValues[idx] is None or not self._componentValues[idx].isValue: + return False + + else: + for componentValue in self._componentValues: + if not componentValue.isValue: + return False + + return True + + def prettyPrint(self, scope=0): + """Return an object representation string. + + Returns + ------- + : :class:`str` + Human-friendly object representation. + """ + scope += 1 + representation = self.__class__.__name__ + ':\n' + for idx in range(len(self._componentValues)): + if self._componentValues[idx] is not None: + representation += ' ' * scope + componentType = self.getComponentType() + if componentType is None: + representation += '' + else: + representation += componentType.getNameByPosition(idx) + representation = '%s=%s\n' % ( + representation, self._componentValues[idx].prettyPrint(scope) + ) + return representation + + def prettyPrintType(self, scope=0): + scope += 1 + representation = '%s -> %s {\n' % (self.tagSet, self.__class__.__name__) + for idx in range(len(self.componentType)): + representation += ' ' * scope + representation += '"%s"' % self.componentType.getNameByPosition(idx) + representation = '%s = %s\n' % ( + representation, self._componentType.getTypeByPosition(idx).prettyPrintType(scope) + ) + return representation + '\n' + ' ' * (scope - 1) + '}' + + # backward compatibility -- no-op + def setDefaultComponents(self): + return self + + +class Sequence(SequenceAndSetBase): + __doc__ = SequenceAndSetBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing constraints on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`) + #: object imposing size constraint on |ASN.1| objects + componentType = namedtype.NamedTypes() + + # Disambiguation ASN.1 types identification + typeId = SequenceAndSetBase.getTypeId() + + def getComponentTagMapNearPosition(self, idx): + if self._componentType: + return self._componentType.getTagMapNearPosition(idx) + + def getComponentPositionNearType(self, tagSet, idx): + if self._componentType: + return self._componentType.getPositionNearType(tagSet, idx) + else: + return idx + + +class Set(SequenceAndSetBase): + __doc__ = SequenceAndSetBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) + ) + + #: Default collection of ASN.1 types of component (e.g. 
:py:class:`~pyasn1.type.namedtype.NamedType`) + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = namedtype.NamedTypes() + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing constraints on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = SequenceAndSetBase.getTypeId() + + def getComponent(self, innerFlag=False): + return self + + def getComponentByType(self, tagSet, innerFlag=False): + """Returns |ASN.1| type component by ASN.1 tag. + + Parameters + ---------- + tagSet : :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tags to identify one of + |ASN.1| object component + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + a pyasn1 object + """ + component = self.getComponentByPosition( + self._componentType.getPositionByType(tagSet) + ) + if innerFlag and isinstance(component, Set): + # get inner component by inner tagSet + return component.getComponent(innerFlag=True) + else: + # get outer component by inner tagSet + return component + + def setComponentByType(self, tagSet, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True, + innerFlag=False): + """Assign |ASN.1| type component by ASN.1 tag. + + Parameters + ---------- + tagSet : :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tags to identify one of + |ASN.1| object component + + value : :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. + + verifyConstraints : :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + innerFlag: :class:`bool` + If `True`, search for matching *tagSet* recursively. + + Returns + ------- + self + """ + idx = self._componentType.getPositionByType(tagSet) + + if innerFlag: # set inner component by inner tagSet + componentType = self._componentType.getTypeByPosition(idx) + + if componentType.tagSet: + return self.setComponentByPosition( + idx, value, verifyConstraints, matchTags, matchConstraints + ) + else: + componentType = self.getComponentByPosition(idx) + return componentType.setComponentByType( + tagSet, value, verifyConstraints, matchTags, matchConstraints, innerFlag=innerFlag + ) + else: # set outer component by inner tagSet + return self.setComponentByPosition( + idx, value, verifyConstraints, matchTags, matchConstraints + ) + + @property + def componentTagMap(self): + if self._componentType: + return self._componentType.getTagMap(True) + + def getComponentPositionByType(self, tagSet): + if self._componentType: + return self._componentType.getPositionByType(tagSet) + + +class Choice(Set): + __doc__ = Set.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.TagSet() # untagged + + #: Default collection of ASN.1 types of component (e.g. 
:py:class:`~pyasn1.type.namedtype.NamedType`) + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = namedtype.NamedTypes() + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing size constraint on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection( + constraint.ValueSizeConstraint(1, 1) + ) + + # Disambiguation ASN.1 types identification + typeId = Set.getTypeId() + + _currentIdx = None + + def __eq__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] == other + return NotImplemented + + def __ne__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] != other + return NotImplemented + + def __lt__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] < other + return NotImplemented + + def __le__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] <= other + return NotImplemented + + def __gt__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] > other + return NotImplemented + + def __ge__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] >= other + return NotImplemented + + if sys.version_info[0] <= 2: + def __nonzero__(self): + return self._componentValues and True or False + else: + def __bool__(self): + return self._componentValues and True or False + + def __len__(self): + return self._currentIdx is not None and 1 or 0 + + def __contains__(self, key): + if self._currentIdx is None: + return False + return key == self._componentType[self._currentIdx].getName() + + def __iter__(self): + if self._currentIdx is None: + raise StopIteration + yield self._componentType[self._currentIdx].getName() + + # Python dict protocol + + def values(self): + if self._currentIdx is not None: + yield self._componentValues[self._currentIdx] + + def keys(self): + if self._currentIdx is not None: + yield self._componentType[self._currentIdx].getName() + + def items(self): + if self._currentIdx is not None: + yield self._componentType[self._currentIdx].getName(), self[self._currentIdx] + + def verifySizeSpec(self): + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + + def _cloneComponentValues(self, myClone, cloneValueFlag): + try: + component = self.getComponent() + except error.PyAsn1Error: + pass + else: + if isinstance(component, Choice): + tagSet = component.effectiveTagSet + else: + tagSet = component.tagSet + if isinstance(component, base.AbstractConstructedAsn1Item): + myClone.setComponentByType( + tagSet, component.clone(cloneValueFlag=cloneValueFlag) + ) + else: + myClone.setComponentByType(tagSet, component.clone()) + + def getComponentByPosition(self, idx): + __doc__ = Set.__doc__ + + if self._currentIdx is None or self._currentIdx != idx: + return Set.getComponentByPosition(self, idx) + + return self._componentValues[idx] + + def setComponentByPosition(self, idx, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True): + """Assign |ASN.1| type component by position. + + Equivalent to Python sequence item assignment operation (e.g. `[]`). 
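+
+        Since |ASN.1| holds a single alternative, assigning one component
+        implicitly discards any previously chosen one. A sketch (field
+        names hypothetical)::
+
+            c['num'] = 7      # chooses the 'num' alternative
+            c['str'] = 'a'    # now 'str' is chosen and 'num' is dropped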
+ + Parameters + ---------- + idx: :class:`int` + Component index (zero-based). Must either refer to existing + component or to N+1 component. In the latter case a new component + type gets instantiated (if *componentType* is set, or given ASN.1 + object is taken otherwise) and appended to the |ASN.1| sequence. + + value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. Once a new value is + set to *idx* component, previous value is dropped. + + verifyConstraints : :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + Returns + ------- + self + """ + oldIdx = self._currentIdx + Set.setComponentByPosition(self, idx, value, verifyConstraints, matchTags, matchConstraints) + self._currentIdx = idx + if oldIdx is not None and oldIdx != idx: + self._componentValues[oldIdx] = None + return self + + def getMinTagSet(self): + if self._tagSet: + return self._tagSet + else: + return self._componentType.minTagSet + + @property + def effectiveTagSet(self): + """Return a :class:`~pyasn1.type.tag.TagSet` object of the currently initialized component or self (if |ASN.1| is tagged).""" + if self._tagSet: + return self._tagSet + else: + component = self.getComponent() + return component.effectiveTagSet + + @property + def tagMap(self): + """"Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping + ASN.1 tags to ASN.1 objects contained within callee. + """ + if self._tagSet: + return Set.tagMap.fget(self) + else: + return Set.componentTagMap.fget(self) + + def getComponent(self, innerFlag=0): + """Return currently assigned component of the |ASN.1| object. + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + a PyASN1 object + """ + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + c = self._componentValues[self._currentIdx] + if innerFlag and isinstance(c, Choice): + return c.getComponent(innerFlag) + else: + return c + + def getName(self, innerFlag=False): + """Return the name of currently assigned component of the |ASN.1| object. + + Returns + ------- + : :py:class:`str` + |ASN.1| component name + """ + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + if innerFlag: + c = self._componentValues[self._currentIdx] + if isinstance(c, Choice): + return c.getName(innerFlag) + return self._componentType.getNameByPosition(self._currentIdx) + + @property + def isValue(self): + """Indicate if |ASN.1| component is set and represents ASN.1 type or ASN.1 value. + + The PyASN1 type objects can only participate in types comparison + and serve as a blueprint for serialization codecs to resolve + ambiguous types. + + The PyASN1 value objects can additionally participate in most + of built-in Python operations. + + Returns + ------- + : :class:`bool` + :class:`True` if |ASN.1| component is set and represent value and type, + :class:`False` if |ASN.1| component is not set or it represents just ASN.1 type. 
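+
+        A sketch (illustrative; assumes *namedtype* from pyasn1.type and
+        *Integer* from this module)::
+
+            c = Choice(componentType=namedtype.NamedTypes(
+                namedtype.NamedType('num', Integer())))
+            c.isValue    # False -- no alternative chosen
+            c.setComponentByName('num', 7)
+            c.isValue    # True
+            c.getName()  # 'num'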
+ """ + if self._currentIdx is None: + return False + + return self._componentValues[self._currentIdx].isValue + + +class Any(OctetString): + __doc__ = OctetString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.TagSet() # untagged + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = OctetString.getTypeId() + + @property + def tagMap(self): + """"Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping + ASN.1 tags to ASN.1 objects contained within callee. + """ + try: + return self._tagMap + + except AttributeError: + self._tagMap = tagmap.TagMap( + {self.tagSet: self}, + {eoo.endOfOctets.tagSet: eoo.endOfOctets}, + self + ) + + return self._tagMap + +# XXX +# coercion rules? diff --git a/src/lib/pyasn1/type/useful.py b/src/lib/pyasn1/type/useful.py new file mode 100644 index 00000000..0b79a983 --- /dev/null +++ b/src/lib/pyasn1/type/useful.py @@ -0,0 +1,39 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import univ, char, tag + +__all__ = ['ObjectDescriptor', 'GeneralizedTime', 'UTCTime'] + +NoValue = univ.NoValue +noValue = univ.noValue + + +class ObjectDescriptor(char.GraphicString): + __doc__ = char.GraphicString.__doc__ + + #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects + tagSet = char.GraphicString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7) + ) + + +class GeneralizedTime(char.VisibleString): + __doc__ = char.GraphicString.__doc__ + + #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects + tagSet = char.VisibleString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24) + ) + + +class UTCTime(char.VisibleString): + __doc__ = char.GraphicString.__doc__ + + #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects + tagSet = char.VisibleString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23) + ) diff --git a/src/lib/pyaes/LICENSE.txt b/src/lib/pybitcointools/LICENSE similarity index 80% rename from src/lib/pyaes/LICENSE.txt rename to src/lib/pybitcointools/LICENSE index 0417a6c2..c47d4ad0 100644 --- a/src/lib/pyaes/LICENSE.txt +++ b/src/lib/pybitcointools/LICENSE @@ -1,6 +1,12 @@ +This code is public domain. Everyone has the right to do whatever they want +with it for any purpose. + +In case your jurisdiction does not consider the above disclaimer valid or +enforceable, here's an MIT license for you: + The MIT License (MIT) -Copyright (c) 2014 Richard Moore +Copyright (c) 2013 Vitalik Buterin Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,4 +25,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- diff --git a/src/lib/pybitcointools/MANIFEST.in b/src/lib/pybitcointools/MANIFEST.in new file mode 100644 index 00000000..70656b68 --- /dev/null +++ b/src/lib/pybitcointools/MANIFEST.in @@ -0,0 +1 @@ +include bitcoin/english.txt diff --git a/src/lib/pybitcointools/README.md b/src/lib/pybitcointools/README.md new file mode 100644 index 00000000..2f2876e7 --- /dev/null +++ b/src/lib/pybitcointools/README.md @@ -0,0 +1,142 @@ +# Pybitcointools, Python library for Bitcoin signatures and transactions + +### Advantages: + +* Functions have a simple interface, inputting and outputting in standard formats +* No classes +* Many functions can be taken out and used individually +* Supports binary, hex and base58 +* Transaction deserialization format almost compatible with BitcoinJS +* Electrum and BIP0032 support +* Make and publish a transaction all in a single command line instruction +* Includes non-bitcoin-specific conversion and JSON utilities + +### Disadvantages: + +* Not a full node, has no idea what blocks are +* Relies on centralized service (blockchain.info) for blockchain operations, although operations do have backups (eligius, blockr.io) + +### Example usage (best way to learn :) ): + + > from bitcoin import * + > priv = sha256('some big long brainwallet password') + > priv + '57c617d9b4e1f7af6ec97ca2ff57e94a28279a7eedd4d12a99fa11170e94f5a4' + > pub = privtopub(priv) + > pub + '0420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9' + > addr = pubtoaddr(pub) + > addr + '1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6' + > h = history(addr) + > h + [{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}] + > outs = [{'value': 90000, 'address': '16iw1MQ1sy1DtRPYw3ao1bCamoyBJtRB4t'}] + > tx = mktx(h,outs) + > tx + '01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f7970000000000ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000' + > tx2 = sign(tx,0,priv) + > tx2 + '01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c0100000000ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000' + > tx3 = sign(tx2,1,priv) + > tx3 + 
'01000000025e46552eb977f908fa8b3ee9d2943a8fa6d96c3b768a5f250ce485acd8c7f797000000008b483045022100dd29d89a28451febb990fb1dafa21245b105140083ced315ebcdea187572b3990220713f2e554f384d29d7abfedf39f0eb92afba0ef46f374e49d43a728a0ff6046e01410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff71d1abe4352100d4d837ca96c1a16947b5444f0f3e0bc645c430f704bb06c84c010000008c4930460221008bbaaaf172adfefc3a1315dc7312c88645832ff76d52e0029d127e65bbeeabe1022100fdeb89658d503cf2737cedb4049e5070f689c50a9b6c85997d49e0787938f93901410420f34c2786b4bae593e22596631b025f3ff46e200fc1d4b52ef49bbdc2ed00b26c584b7e32523fb01be2294a1f8a5eb0cf71a203cc034ced46ea92a8df16c6e9ffffffff01905f0100000000001976a9143ec6c3ed8dfc3ceabcc1cbdb0c5aef4e2d02873c88ac00000000' + > pushtx(tx3) + 'Transaction Submitted' + +Or using the pybtctool command line interface: + + @vub: pybtctool random_electrum_seed + 484ccb566edb66c65dd0fd2e4d90ef65 + + @vub: pybtctool electrum_privkey 484ccb566edb66c65dd0fd2e4d90ef65 0 0 + 593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7 + + @vub: pybtctool electrum_mpk 484ccb566edb66c65dd0fd2e4d90ef65 + 484e42865b8e9a6ea8262fd1cde666b557393258ed598d842e563ad9e5e6c70a97e387eefdef123c1b8b4eb21fe210c6216ad7cc1e4186fbbba70f0e2c062c25 + + @vub: pybtctool bip32_master_key 21456t243rhgtucyadh3wgyrcubw3grydfbng + xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT + + @vub: pybtctool bip32_ckd xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT 0 + xprv9vfzYrpwo7QHFdtrcvsSCTrBESFPUf1g7NRvayy1QkEfUekpDKLfqvHjgypF5w3nAvnwPjtQUNkyywWNkLbiUS95khfHCzJXFkLEdwRepbw + + @vub: pybtctool bip32_privtopub xprv9s21ZrQH143K2napkeoHT48gWmoJa89KCQj4nqLfdGybyWHP9Z8jvCGzuEDv4ihCyoed7RFPNbc9NxoSF7cAvH9AaNSvepUaeqbSpJZ4rbT + xpub661MyMwAqRbcFGfHrgLHpC5R4odnyasAZdefbDkHBcWarJcXh6SzTzbUkWuhnP142ZFdKdAJSuTSaiGDYjvm7bCLmA8DZqksYjJbYmcgrYF + +The -s option lets you read arguments from the command line + + @vub: pybtctool sha256 'some big long brainwallet password' | pybtctool -s privtoaddr | pybtctool -s history + [{'output': u'97f7c7d8ac85e40c255f8a763b6cd9a68f3a94d2e93e8bfa08f977b92e55465e:0', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}, {'output': u'4cc806bb04f730c445c60b3e0f4f44b54769a1c196ca37d8d4002135e4abd171:1', 'value': 50000, 'address': u'1CQLd3bhw4EzaURHbKCwM5YZbUQfA4ReY6'}] + @vub: pybtctool random_electrum_seed | pybtctool -s electrum_privkey 0 0 + 593240c2205e7b7b5d7c13393b7c9553497854b75c7470b76aeca50cd4a894d7 + +The -b option lets you read binary data as an argument + + @vub: pybtctool sha256 123 | pybtctool -s changebase 16 256 | pybtctool -b changebase 256 16 + a665a45920422f9d417e4867efdc4fb8a04a1f3fff1fa07e998e86f7f7a27ae30a + +The -j option lets you read json from the command line (-J to split a json list into multiple arguments) + + @vub: pybtctool unspent 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq | pybtctool -j select 200000001 | pybtctool -j mksend 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P:20000 1FxkfJQLJTXpW6QmxGT6oF43ZH959ns8Cq 1000 | pybtctool -s signall 805cd74ca322633372b9bfb857f3be41db0b8de43a3c44353b238c0acff9d523 + 
0100000003d5001aae8358ae98cb02c1b6f9859dc1ac3dbc1e9cc88632afeb7b7e3c510a49000000008b4830450221009e03bb6122437767e2ca785535824f4ed13d2ebbb9fa4f9becc6d6f4e1e217dc022064577353c08d8d974250143d920d3b963b463e43bbb90f3371060645c49266b90141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff1529d655dff6a0f6c9815ee835312fb3ca4df622fde21b6b9097666e9284087d010000008a473044022035dd67d18b575ebd339d05ca6ffa1d27d7549bd993aeaf430985795459fc139402201aaa162cc50181cee493870c9479b1148243a33923cb77be44a73ca554a4e5d60141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff23d5f9cf0a8c233b35443c3ae48d0bdb41bef357b8bfb972336322a34cd75c80010000008b483045022014daa5c5bbe9b3e5f2539a5cd8e22ce55bc84788f946c5b3643ecac85b4591a9022100a4062074a1df3fa0aea5ef67368d0b1f0eaac520bee6e417c682d83cd04330450141048ef80f6bd6b073407a69299c2ba89de48adb59bb9689a5ab040befbbebcfbb15d01b006a6b825121a0d2c546c277acb60f0bd3203bd501b8d67c7dba91f27f47ffffffff02204e0000000000001976a914946cb2e08075bcbaf157e47bcb67eb2b2339d24288ac5b3c4411000000001976a914a41d15ae657ad3bfd0846771a34d7584c37d54a288ac00000000 + +Fun stuff with json: + + @vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j multiaccess value | pybtctool -j sum + 625216206372 + + @vub: pybtctool history 1EXoDusjGwvnjZUyKkxZ4UHEf77z6A5S4P | pybtctool -j count + 6198 + +### Listing of main commands: + +* privkey_to_pubkey : (privkey) -> pubkey +* privtopub : (privkey) -> pubkey +* pubkey_to_address : (pubkey) -> address +* pubtoaddr : (pubkey) -> address +* privkey_to_address : (privkey) -> address +* privtoaddr : (privkey) -> address + +* add : (key1, key2) -> key1 + key2 (works on privkeys or pubkeys) +* multiply : (pubkey, privkey) -> returns pubkey * privkey + +* ecdsa_sign : (message, privkey) -> sig +* ecdsa_verify : (message, sig, pubkey) -> True/False +* ecdsa_recover : (message, sig) -> pubkey + +* random_key : () -> privkey +* random_electrum_seed : () -> electrum seed + +* electrum_stretch : (seed) -> secret exponent +* electrum_privkey : (seed or secret exponent, i, type) -> privkey +* electrum_mpk : (seed or secret exponent) -> master public key +* electrum_pubkey : (seed or secexp or mpk) -> pubkey + +* bip32_master_key : (seed) -> bip32 master key +* bip32_ckd : (private or public bip32 key, i) -> child key +* bip32_privtopub : (private bip32 key) -> public bip32 key +* bip32_extract_key : (private or public bip32_key) -> privkey or pubkey + +* deserialize : (hex or bin transaction) -> JSON tx +* serialize : (JSON tx) -> hex or bin tx +* mktx : (inputs, outputs) -> tx +* mksend : (inputs, outputs, change_addr, fee) -> tx +* sign : (tx, i, privkey) -> tx with index i signed with privkey +* multisign : (tx, i, script, privkey) -> signature +* apply_multisignatures: (tx, i, script, sigs) -> tx with index i signed with sigs +* scriptaddr : (script) -> P2SH address +* mk_multisig_script : (pubkeys, k, n) -> k-of-n multisig script from pubkeys +* verify_tx_input : (tx, i, script, sig, pub) -> True/False +* tx_hash : (hex or bin tx) -> hash + +* history : (address1, address2, etc) -> outputs to those addresses +* unspent : (address1, address2, etc) -> unspent outputs to those addresses +* fetchtx : (txash) -> tx if present +* pushtx : (hex or bin tx) -> tries to push to blockchain.info/pushtx + +* access : (json list/object, prop) -> desired property of that json object +* multiaccess : (json 
list, prop) -> like access, but mapped across each list element
+* slice : (json list, start, end) -> given slice of the list
+* count : (json list) -> number of elements
+* sum : (json list) -> sum of all values
diff --git a/src/lib/pybitcointools/__init__.py b/src/lib/pybitcointools/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/lib/pybitcointools/bitcoin/__init__.py b/src/lib/pybitcointools/bitcoin/__init__.py
new file mode 100644
index 00000000..7d529abc
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/__init__.py
@@ -0,0 +1,10 @@
+from .py2specials import *
+from .py3specials import *
+from .main import *
+from .transaction import *
+from .deterministic import *
+from .bci import *
+from .composite import *
+from .stealth import *
+from .blocks import *
+from .mnemonic import *
diff --git a/src/lib/pybitcointools/bitcoin/bci.py b/src/lib/pybitcointools/bitcoin/bci.py
new file mode 100644
index 00000000..79a2c401
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/bci.py
@@ -0,0 +1,528 @@
+#!/usr/bin/python
+import json, re
+import random
+import sys
+try:
+    from urllib.request import build_opener
+except:
+    from urllib2 import build_opener
+
+
+# Makes a request to a given URL (first arg) and optional params (second arg)
+def make_request(*args):
+    opener = build_opener()
+    opener.addheaders = [('User-agent',
+                          'Mozilla/5.0'+str(random.randrange(1000000)))]
+    try:
+        return opener.open(*args).read().strip()
+    except Exception as e:
+        try:
+            p = e.read().strip()
+        except:
+            p = e
+        raise Exception(p)
+
+
+def is_testnet(inp):
+    '''Checks if inp is a testnet address or if UTXO is a known testnet TxID'''
+    if isinstance(inp, (list, tuple)) and len(inp) >= 1:
+        return any([is_testnet(x) for x in inp])
+    elif not isinstance(inp, basestring):    # sanity check
+        raise TypeError("Input must be str/unicode, not type %s" % str(type(inp)))
+
+    if not inp or (inp.lower() in ("btc", "testnet")):
+        pass
+
+    ## ADDRESSES
+    if inp[0] in "123mn":
+        if re.match("^[2mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp):
+            return True
+        elif re.match("^[13][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp):
+            return False
+        else:
+            #sys.stderr.write("Bad address format %s")
+            return None
+
+    ## TXID
+    elif re.match('^[0-9a-fA-F]{64}$', inp):
+        base_url = "http://api.blockcypher.com/v1/btc/{network}/txs/{txid}?includesHex=false"
+        try:
+            # try testnet fetchtx
+            make_request(base_url.format(network="test3", txid=inp.lower()))
+            return True
+        except:
+            # try mainnet fetchtx
+            make_request(base_url.format(network="main", txid=inp.lower()))
+            return False
+        sys.stderr.write("TxID %s has no match for testnet or mainnet (Bad TxID)")
+        return None
+    else:
+        raise TypeError("{0} is unknown input".format(inp))
+
+
+def set_network(*args):
+    '''Decides if args for unspent/fetchtx/pushtx are mainnet or testnet'''
+    r = []
+    for arg in args:
+        if not arg:
+            pass
+        if isinstance(arg, basestring):
+            r.append(is_testnet(arg))
+        elif isinstance(arg, (list, tuple)):
+            return set_network(*arg)
+    if any(r) and not all(r):
+        raise Exception("Mixed Testnet/Mainnet queries")
+    return "testnet" if any(r) else "btc"
+
+
+def parse_addr_args(*args):
+    # Valid input formats: unspent([addr1, addr2, addr3])
+    #                      unspent([addr1, addr2, addr3], network)
+    #                      unspent(addr1, addr2, addr3)
+    #                      unspent(addr1, addr2, addr3, network)
+    addr_args = args
+    network = "btc"
+    if len(args) == 0:
+        # keep the (network, addrs) return order consistent with below
+        return 'btc', []
+    if len(args) >= 1 and args[-1] in ('testnet', 'btc'):
+        network = args[-1]
+        addr_args = args[:-1]
+    # a single list argument carries the addresses themselves
+    if len(addr_args) == 1 and isinstance(addr_args[0], list):
+        network = set_network(*addr_args[0])
+        addr_args = addr_args[0]
+    if addr_args and isinstance(addr_args, tuple) and isinstance(addr_args[0], list):
+        addr_args = addr_args[0]
+        network = set_network(addr_args)
+    return network, addr_args
+
+
+# Gets the unspent outputs of one or more addresses
+def bci_unspent(*args):
+    network, addrs = parse_addr_args(*args)
+    u = []
+    for a in addrs:
+        try:
+            data = make_request('https://blockchain.info/unspent?active='+a)
+        except Exception as e:
+            if str(e) == 'No free outputs to spend':
+                continue
+            else:
+                raise Exception(e)
+        try:
+            jsonobj = json.loads(data.decode("utf-8"))
+            for o in jsonobj["unspent_outputs"]:
+                h = o['tx_hash'].decode('hex')[::-1].encode('hex')
+                u.append({
+                    "output": h+':'+str(o['tx_output_n']),
+                    "value": o['value']
+                })
+        except:
+            raise Exception("Failed to decode data: "+data)
+    return u
+
+
+def blockr_unspent(*args):
+    # Valid input formats: blockr_unspent([addr1, addr2,addr3])
+    #                      blockr_unspent(addr1, addr2, addr3)
+    #                      blockr_unspent([addr1, addr2, addr3], network)
+    #                      blockr_unspent(addr1, addr2, addr3, network)
+    # Where network is 'btc' or 'testnet'
+    network, addr_args = parse_addr_args(*args)
+
+    if network == 'testnet':
+        blockr_url = 'http://tbtc.blockr.io/api/v1/address/unspent/'
+    elif network == 'btc':
+        blockr_url = 'http://btc.blockr.io/api/v1/address/unspent/'
+    else:
+        raise Exception(
+            'Unsupported network {0} for blockr_unspent'.format(network))
+
+    if len(addr_args) == 0:
+        return []
+    elif isinstance(addr_args[0], list):
+        addrs = addr_args[0]
+    else:
+        addrs = addr_args
+    res = make_request(blockr_url+','.join(addrs))
+    data = json.loads(res.decode("utf-8"))['data']
+    o = []
+    if 'unspent' in data:
+        data = [data]
+    for dat in data:
+        for u in dat['unspent']:
+            o.append({
+                "output": u['tx']+':'+str(u['n']),
+                "value": int(u['amount'].replace('.', ''))
+            })
+    return o
+
+
+def helloblock_unspent(*args):
+    # parse_addr_args() returns (network, addrs), in that order
+    network, addrs = parse_addr_args(*args)
+    if network == 'testnet':
+        url = 'https://testnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s'
+    elif network == 'btc':
+        url = 'https://mainnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s'
+    o = []
+    for addr in addrs:
+        for offset in xrange(0, 10**9, 500):
+            res = make_request(url % (addr, offset))
+            data = json.loads(res.decode("utf-8"))["data"]
+            if not len(data["unspents"]):
+                break
+            elif offset:
+                sys.stderr.write("Getting more unspents: %d\n" % offset)
+            for dat in data["unspents"]:
+                o.append({
+                    "output": dat["txHash"]+':'+str(dat["index"]),
+                    "value": dat["value"],
+                })
+    return o
+
+
+unspent_getters = {
+    'bci': bci_unspent,
+    'blockr': blockr_unspent,
+    'helloblock': helloblock_unspent
+}
+
+
+def unspent(*args, **kwargs):
+    f = unspent_getters.get(kwargs.get('source', ''), bci_unspent)
+    return f(*args)
+
+
+# Gets the transaction output history of a given set of addresses,
+# including whether or not they have been spent
+def history(*args):
+    # Valid input formats: history([addr1, addr2,addr3])
+    #                      history(addr1, addr2, addr3)
+    if len(args) == 0:
+        return []
+    elif isinstance(args[0], list):
+        addrs = args[0]
+    else:
+        addrs = args
+
+    txs = []
+    for addr in addrs:
+        offset = 0
+        while 1:
+            gathered = False
+            while not gathered:
+                try:
+                    data = make_request(
+                        'https://blockchain.info/address/%s?format=json&offset=%s' %
+                        (addr, offset))
+                    gathered = True
+                except Exception as e:
+                    try:
+                        sys.stderr.write(e.read().strip())
+                    except:
+                        sys.stderr.write(str(e))
+                    gathered = False
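+            # blockchain.info pages history 50 txs at a time; parse this
+            # page, extend the running list and advance the offset below.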
try: + jsonobj = json.loads(data.decode("utf-8")) + except: + raise Exception("Failed to decode data: "+data) + txs.extend(jsonobj["txs"]) + if len(jsonobj["txs"]) < 50: + break + offset += 50 + sys.stderr.write("Fetching more transactions... "+str(offset)+'\n') + outs = {} + for tx in txs: + for o in tx["out"]: + if o.get('addr', None) in addrs: + key = str(tx["tx_index"])+':'+str(o["n"]) + outs[key] = { + "address": o["addr"], + "value": o["value"], + "output": tx["hash"]+':'+str(o["n"]), + "block_height": tx.get("block_height", None) + } + for tx in txs: + for i, inp in enumerate(tx["inputs"]): + if "prev_out" in inp: + if inp["prev_out"].get("addr", None) in addrs: + key = str(inp["prev_out"]["tx_index"]) + \ + ':'+str(inp["prev_out"]["n"]) + if outs.get(key): + outs[key]["spend"] = tx["hash"]+':'+str(i) + return [outs[k] for k in outs] + + +# Pushes a transaction to the network using https://blockchain.info/pushtx +def bci_pushtx(tx): + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + return make_request('https://blockchain.info/pushtx', 'tx='+tx) + + +def eligius_pushtx(tx): + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + s = make_request( + 'http://eligius.st/~wizkid057/newstats/pushtxn.php', + 'transaction='+tx+'&send=Push') + strings = re.findall('string[^"]*"[^"]*"', s) + for string in strings: + quote = re.findall('"[^"]*"', string)[0] + if len(quote) >= 5: + return quote[1:-1] + + +def blockr_pushtx(tx, network='btc'): + if network == 'testnet': + blockr_url = 'http://tbtc.blockr.io/api/v1/tx/push' + elif network == 'btc': + blockr_url = 'http://btc.blockr.io/api/v1/tx/push' + else: + raise Exception( + 'Unsupported network {0} for blockr_pushtx'.format(network)) + + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + return make_request(blockr_url, '{"hex":"%s"}' % tx) + + +def helloblock_pushtx(tx): + if not re.match('^[0-9a-fA-F]*$', tx): + tx = tx.encode('hex') + return make_request('https://mainnet.helloblock.io/v1/transactions', + 'rawTxHex='+tx) + +pushtx_getters = { + 'bci': bci_pushtx, + 'blockr': blockr_pushtx, + 'helloblock': helloblock_pushtx +} + + +def pushtx(*args, **kwargs): + f = pushtx_getters.get(kwargs.get('source', ''), bci_pushtx) + return f(*args) + + +def last_block_height(network='btc'): + if network == 'testnet': + data = make_request('http://tbtc.blockr.io/api/v1/block/info/last') + jsonobj = json.loads(data.decode("utf-8")) + return jsonobj["data"]["nb"] + + data = make_request('https://blockchain.info/latestblock') + jsonobj = json.loads(data.decode("utf-8")) + return jsonobj["height"] + + +# Gets a specific transaction +def bci_fetchtx(txhash): + if isinstance(txhash, list): + return [bci_fetchtx(h) for h in txhash] + if not re.match('^[0-9a-fA-F]*$', txhash): + txhash = txhash.encode('hex') + data = make_request('https://blockchain.info/rawtx/'+txhash+'?format=hex') + return data + + +def blockr_fetchtx(txhash, network='btc'): + if network == 'testnet': + blockr_url = 'http://tbtc.blockr.io/api/v1/tx/raw/' + elif network == 'btc': + blockr_url = 'http://btc.blockr.io/api/v1/tx/raw/' + else: + raise Exception( + 'Unsupported network {0} for blockr_fetchtx'.format(network)) + if isinstance(txhash, list): + txhash = ','.join([x.encode('hex') if not re.match('^[0-9a-fA-F]*$', x) + else x for x in txhash]) + jsondata = json.loads(make_request(blockr_url+txhash).decode("utf-8")) + return [d['tx']['hex'] for d in jsondata['data']] + else: + if not re.match('^[0-9a-fA-F]*$', txhash): + txhash = 
txhash.encode('hex') + jsondata = json.loads(make_request(blockr_url+txhash).decode("utf-8")) + return jsondata['data']['tx']['hex'] + + +def helloblock_fetchtx(txhash, network='btc'): + if isinstance(txhash, list): + return [helloblock_fetchtx(h) for h in txhash] + if not re.match('^[0-9a-fA-F]*$', txhash): + txhash = txhash.encode('hex') + if network == 'testnet': + url = 'https://testnet.helloblock.io/v1/transactions/' + elif network == 'btc': + url = 'https://mainnet.helloblock.io/v1/transactions/' + else: + raise Exception( + 'Unsupported network {0} for helloblock_fetchtx'.format(network)) + data = json.loads(make_request(url + txhash).decode("utf-8"))["data"]["transaction"] + o = { + "locktime": data["locktime"], + "version": data["version"], + "ins": [], + "outs": [] + } + for inp in data["inputs"]: + o["ins"].append({ + "script": inp["scriptSig"], + "outpoint": { + "index": inp["prevTxoutIndex"], + "hash": inp["prevTxHash"], + }, + "sequence": 4294967295 + }) + for outp in data["outputs"]: + o["outs"].append({ + "value": outp["value"], + "script": outp["scriptPubKey"] + }) + from .transaction import serialize + from .transaction import txhash as TXHASH + tx = serialize(o) + assert TXHASH(tx) == txhash + return tx + + +fetchtx_getters = { + 'bci': bci_fetchtx, + 'blockr': blockr_fetchtx, + 'helloblock': helloblock_fetchtx +} + + +def fetchtx(*args, **kwargs): + f = fetchtx_getters.get(kwargs.get('source', ''), bci_fetchtx) + return f(*args) + + +def firstbits(address): + if len(address) >= 25: + return make_request('https://blockchain.info/q/getfirstbits/'+address) + else: + return make_request( + 'https://blockchain.info/q/resolvefirstbits/'+address) + + +def get_block_at_height(height): + j = json.loads(make_request("https://blockchain.info/block-height/" + + str(height)+"?format=json").decode("utf-8")) + for b in j['blocks']: + if b['main_chain'] is True: + return b + raise Exception("Block at this height not found") + + +def _get_block(inp): + if len(str(inp)) < 64: + return get_block_at_height(inp) + else: + return json.loads(make_request( + 'https://blockchain.info/rawblock/'+inp).decode("utf-8")) + + +def bci_get_block_header_data(inp): + j = _get_block(inp) + return { + 'version': j['ver'], + 'hash': j['hash'], + 'prevhash': j['prev_block'], + 'timestamp': j['time'], + 'merkle_root': j['mrkl_root'], + 'bits': j['bits'], + 'nonce': j['nonce'], + } + +def blockr_get_block_header_data(height, network='btc'): + if network == 'testnet': + blockr_url = "http://tbtc.blockr.io/api/v1/block/raw/" + elif network == 'btc': + blockr_url = "http://btc.blockr.io/api/v1/block/raw/" + else: + raise Exception( + 'Unsupported network {0} for blockr_get_block_header_data'.format(network)) + + k = json.loads(make_request(blockr_url + str(height)).decode("utf-8")) + j = k['data'] + return { + 'version': j['version'], + 'hash': j['hash'], + 'prevhash': j['previousblockhash'], + 'timestamp': j['time'], + 'merkle_root': j['merkleroot'], + 'bits': int(j['bits'], 16), + 'nonce': j['nonce'], + } + + +def get_block_timestamp(height, network='btc'): + if network == 'testnet': + blockr_url = "http://tbtc.blockr.io/api/v1/block/info/" + elif network == 'btc': + blockr_url = "http://btc.blockr.io/api/v1/block/info/" + else: + raise Exception( + 'Unsupported network {0} for get_block_timestamp'.format(network)) + + import time, calendar + if isinstance(height, list): + k = json.loads(make_request(blockr_url + ','.join([str(x) for x in height])).decode("utf-8")) + o = {x['nb']: 
calendar.timegm(time.strptime(x['time_utc'], + "%Y-%m-%dT%H:%M:%SZ")) for x in k['data']} + return [o[x] for x in height] + else: + k = json.loads(make_request(blockr_url + str(height)).decode("utf-8")) + j = k['data']['time_utc'] + return calendar.timegm(time.strptime(j, "%Y-%m-%dT%H:%M:%SZ")) + + +block_header_data_getters = { + 'bci': bci_get_block_header_data, + 'blockr': blockr_get_block_header_data +} + + +def get_block_header_data(inp, **kwargs): + f = block_header_data_getters.get(kwargs.get('source', ''), + bci_get_block_header_data) + return f(inp, **kwargs) + + +def get_txs_in_block(inp): + j = _get_block(inp) + hashes = [t['hash'] for t in j['tx']] + return hashes + + +def get_block_height(txhash): + j = json.loads(make_request('https://blockchain.info/rawtx/'+txhash).decode("utf-8")) + return j['block_height'] + +# fromAddr, toAddr, 12345, changeAddress +def get_tx_composite(inputs, outputs, output_value, change_address=None, network=None): + """mktx using blockcypher API""" + inputs = [inputs] if not isinstance(inputs, list) else inputs + outputs = [outputs] if not isinstance(outputs, list) else outputs + network = set_network(change_address or inputs) if not network else network.lower() + url = "http://api.blockcypher.com/v1/btc/{network}/txs/new?includeToSignTx=true".format( + network=('test3' if network=='testnet' else 'main')) + is_address = lambda a: bool(re.match("^[123mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", a)) + if any([is_address(x) for x in inputs]): + inputs_type = 'addresses' # also accepts UTXOs, only addresses supported presently + if any([is_address(x) for x in outputs]): + outputs_type = 'addresses' # TODO: add UTXO support + data = { + 'inputs': [{inputs_type: inputs}], + 'confirmations': 0, + 'preference': 'high', + 'outputs': [{outputs_type: outputs, "value": output_value}] + } + if change_address: + data["change_address"] = change_address # + jdata = json.loads(make_request(url, data)) + hash, txh = jdata.get("tosign")[0], jdata.get("tosign_tx")[0] + assert bin_dbl_sha256(txh.decode('hex')).encode('hex') == hash, "checksum mismatch %s" % hash + return txh.encode("utf-8") + +blockcypher_mktx = get_tx_composite diff --git a/src/lib/pybitcointools/bitcoin/blocks.py b/src/lib/pybitcointools/bitcoin/blocks.py new file mode 100644 index 00000000..9df6b35c --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/blocks.py @@ -0,0 +1,50 @@ +from .main import * + + +def serialize_header(inp): + o = encode(inp['version'], 256, 4)[::-1] + \ + inp['prevhash'].decode('hex')[::-1] + \ + inp['merkle_root'].decode('hex')[::-1] + \ + encode(inp['timestamp'], 256, 4)[::-1] + \ + encode(inp['bits'], 256, 4)[::-1] + \ + encode(inp['nonce'], 256, 4)[::-1] + h = bin_sha256(bin_sha256(o))[::-1].encode('hex') + assert h == inp['hash'], (sha256(o), inp['hash']) + return o.encode('hex') + + +def deserialize_header(inp): + inp = inp.decode('hex') + return { + "version": decode(inp[:4][::-1], 256), + "prevhash": inp[4:36][::-1].encode('hex'), + "merkle_root": inp[36:68][::-1].encode('hex'), + "timestamp": decode(inp[68:72][::-1], 256), + "bits": decode(inp[72:76][::-1], 256), + "nonce": decode(inp[76:80][::-1], 256), + "hash": bin_sha256(bin_sha256(inp))[::-1].encode('hex') + } + + +def mk_merkle_proof(header, hashes, index): + nodes = [h.decode('hex')[::-1] for h in hashes] + if len(nodes) % 2 and len(nodes) > 2: + nodes.append(nodes[-1]) + layers = [nodes] + while len(nodes) > 1: + newnodes = [] + for i in range(0, len(nodes) - 1, 2): + newnodes.append(bin_sha256(bin_sha256(nodes[i] + 
nodes[i+1]))) + if len(newnodes) % 2 and len(newnodes) > 2: + newnodes.append(newnodes[-1]) + nodes = newnodes + layers.append(nodes) + # Sanity check, make sure merkle root is valid + assert nodes[0][::-1].encode('hex') == header['merkle_root'] + merkle_siblings = \ + [layers[i][(index >> i) ^ 1] for i in range(len(layers)-1)] + return { + "hash": hashes[index], + "siblings": [x[::-1].encode('hex') for x in merkle_siblings], + "header": header + } diff --git a/src/lib/pybitcointools/bitcoin/composite.py b/src/lib/pybitcointools/bitcoin/composite.py new file mode 100644 index 00000000..e5d50492 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/composite.py @@ -0,0 +1,128 @@ +from .main import * +from .transaction import * +from .bci import * +from .deterministic import * +from .blocks import * + + +# Takes privkey, address, value (satoshis), fee (satoshis) +def send(frm, to, value, fee=10000, **kwargs): + return sendmultitx(frm, to + ":" + str(value), fee, **kwargs) + + +# Takes privkey, "address1:value1,address2:value2" (satoshis), fee (satoshis) +def sendmultitx(frm, *args, **kwargs): + tv, fee = args[:-1], int(args[-1]) + outs = [] + outvalue = 0 + for a in tv: + outs.append(a) + outvalue += int(a.split(":")[1]) + + u = unspent(privtoaddr(frm), **kwargs) + u2 = select(u, int(outvalue)+int(fee)) + argz = u2 + outs + [privtoaddr(frm), fee] + tx = mksend(*argz) + tx2 = signall(tx, frm) + return pushtx(tx2, **kwargs) + + +# Takes address, address, value (satoshis), fee(satoshis) +def preparetx(frm, to, value, fee=10000, **kwargs): + tovalues = to + ":" + str(value) + return preparemultitx(frm, tovalues, fee, **kwargs) + + +# Takes address, address:value, address:value ... (satoshis), fee(satoshis) +def preparemultitx(frm, *args, **kwargs): + tv, fee = args[:-1], int(args[-1]) + outs = [] + outvalue = 0 + for a in tv: + outs.append(a) + outvalue += int(a.split(":")[1]) + + u = unspent(frm, **kwargs) + u2 = select(u, int(outvalue)+int(fee)) + argz = u2 + outs + [frm, fee] + return mksend(*argz) + + +# BIP32 hierarchical deterministic multisig script +def bip32_hdm_script(*args): + if len(args) == 3: + keys, req, path = args + else: + i, keys, path = 0, [], [] + while len(args[i]) > 40: + keys.append(args[i]) + i += 1 + req = int(args[i]) + path = map(int, args[i+1:]) + pubs = sorted(map(lambda x: bip32_descend(x, path), keys)) + return mk_multisig_script(pubs, req) + + +# BIP32 hierarchical deterministic multisig address +def bip32_hdm_addr(*args): + return scriptaddr(bip32_hdm_script(*args)) + + +# Setup a coinvault transaction +def setup_coinvault_tx(tx, script): + txobj = deserialize(tx) + N = deserialize_script(script)[-2] + for inp in txobj["ins"]: + inp["script"] = serialize_script([None] * (N+1) + [script]) + return serialize(txobj) + + +# Sign a coinvault transaction +def sign_coinvault_tx(tx, priv): + pub = privtopub(priv) + txobj = deserialize(tx) + subscript = deserialize_script(txobj['ins'][0]['script']) + oscript = deserialize_script(subscript[-1]) + k, pubs = oscript[0], oscript[1:-2] + for j in range(len(txobj['ins'])): + scr = deserialize_script(txobj['ins'][j]['script']) + for i, p in enumerate(pubs): + if p == pub: + scr[i+1] = multisign(tx, j, subscript[-1], priv) + if len(filter(lambda x: x, scr[1:-1])) >= k: + scr = [None] + filter(lambda x: x, scr[1:-1])[:k] + [scr[-1]] + txobj['ins'][j]['script'] = serialize_script(scr) + return serialize(txobj) + + +# Inspects a transaction +def inspect(tx, **kwargs): + d = deserialize(tx) + isum = 0 + ins = {} + for _in in d['ins']: 
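+        # Fetch each input's source transaction and total the spent values
+        # per address; the fee reported below is inputs minus outputs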
+ h = _in['outpoint']['hash'] + i = _in['outpoint']['index'] + prevout = deserialize(fetchtx(h, **kwargs))['outs'][i] + isum += prevout['value'] + a = script_to_address(prevout['script']) + ins[a] = ins.get(a, 0) + prevout['value'] + outs = [] + osum = 0 + for _out in d['outs']: + outs.append({'address': script_to_address(_out['script']), + 'value': _out['value']}) + osum += _out['value'] + return { + 'fee': isum - osum, + 'outs': outs, + 'ins': ins + } + + +def merkle_prove(txhash): + blocknum = str(get_block_height(txhash)) + header = get_block_header_data(blocknum) + hashes = get_txs_in_block(blocknum) + i = hashes.index(txhash) + return mk_merkle_proof(header, hashes, i) diff --git a/src/lib/pybitcointools/bitcoin/deterministic.py b/src/lib/pybitcointools/bitcoin/deterministic.py new file mode 100644 index 00000000..b2bdbbc6 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/deterministic.py @@ -0,0 +1,199 @@ +from .main import * +import hmac +import hashlib +from binascii import hexlify +# Electrum wallets + + +def electrum_stretch(seed): + return slowsha(seed) + +# Accepts seed or stretched seed, returns master public key + + +def electrum_mpk(seed): + if len(seed) == 32: + seed = electrum_stretch(seed) + return privkey_to_pubkey(seed)[2:] + +# Accepts (seed or stretched seed), index and secondary index +# (conventionally 0 for ordinary addresses, 1 for change) , returns privkey + + +def electrum_privkey(seed, n, for_change=0): + if len(seed) == 32: + seed = electrum_stretch(seed) + mpk = electrum_mpk(seed) + offset = dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+binascii.unhexlify(mpk)) + return add_privkeys(seed, offset) + +# Accepts (seed or stretched seed or master pubkey), index and secondary index +# (conventionally 0 for ordinary addresses, 1 for change) , returns pubkey + + +def electrum_pubkey(masterkey, n, for_change=0): + if len(masterkey) == 32: + mpk = electrum_mpk(electrum_stretch(masterkey)) + elif len(masterkey) == 64: + mpk = electrum_mpk(masterkey) + else: + mpk = masterkey + bin_mpk = encode_pubkey(mpk, 'bin_electrum') + offset = bin_dbl_sha256(from_int_representation_to_bytes(n)+b':'+from_int_representation_to_bytes(for_change)+b':'+bin_mpk) + return add_pubkeys('04'+mpk, privtopub(offset)) + +# seed/stretched seed/pubkey -> address (convenience method) + + +def electrum_address(masterkey, n, for_change=0, version=0): + return pubkey_to_address(electrum_pubkey(masterkey, n, for_change), version) + +# Given a master public key, a private key from that wallet and its index, +# cracks the secret exponent which can be used to generate all other private +# keys in the wallet + + +def crack_electrum_wallet(mpk, pk, n, for_change=0): + bin_mpk = encode_pubkey(mpk, 'bin_electrum') + offset = dbl_sha256(str(n)+':'+str(for_change)+':'+bin_mpk) + return subtract_privkeys(pk, offset) + +# Below code ASSUMES binary inputs and compressed pubkeys +MAINNET_PRIVATE = b'\x04\x88\xAD\xE4' +MAINNET_PUBLIC = b'\x04\x88\xB2\x1E' +TESTNET_PRIVATE = b'\x04\x35\x83\x94' +TESTNET_PUBLIC = b'\x04\x35\x87\xCF' +PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE] +PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC] + +# BIP32 child key derivation + + +def raw_bip32_ckd(rawtuple, i): + vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple + i = int(i) + + if vbytes in PRIVATE: + priv = key + pub = privtopub(key) + else: + pub = key + + if i >= 2**31: + if vbytes in PUBLIC: + raise Exception("Can't do private derivation on public key!") + I = 
hmac.new(chaincode, b'\x00'+priv[:32]+encode(i, 256, 4), hashlib.sha512).digest() + else: + I = hmac.new(chaincode, pub+encode(i, 256, 4), hashlib.sha512).digest() + + if vbytes in PRIVATE: + newkey = add_privkeys(I[:32]+B'\x01', priv) + fingerprint = bin_hash160(privtopub(key))[:4] + if vbytes in PUBLIC: + newkey = add_pubkeys(compress(privtopub(I[:32])), key) + fingerprint = bin_hash160(key)[:4] + + return (vbytes, depth + 1, fingerprint, i, I[32:], newkey) + + +def bip32_serialize(rawtuple): + vbytes, depth, fingerprint, i, chaincode, key = rawtuple + i = encode(i, 256, 4) + chaincode = encode(hash_to_int(chaincode), 256, 32) + keydata = b'\x00'+key[:-1] if vbytes in PRIVATE else key + bindata = vbytes + from_int_to_byte(depth % 256) + fingerprint + i + chaincode + keydata + return changebase(bindata+bin_dbl_sha256(bindata)[:4], 256, 58) + + +def bip32_deserialize(data): + dbin = changebase(data, 58, 256) + if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]: + raise Exception("Invalid checksum") + vbytes = dbin[0:4] + depth = from_byte_to_int(dbin[4]) + fingerprint = dbin[5:9] + i = decode(dbin[9:13], 256) + chaincode = dbin[13:45] + key = dbin[46:78]+b'\x01' if vbytes in PRIVATE else dbin[45:78] + return (vbytes, depth, fingerprint, i, chaincode, key) + + +def raw_bip32_privtopub(rawtuple): + vbytes, depth, fingerprint, i, chaincode, key = rawtuple + newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC + return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key)) + + +def bip32_privtopub(data): + return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data))) + + +def bip32_ckd(data, i): + return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i)) + + +def bip32_master_key(seed, vbytes=MAINNET_PRIVATE): + I = hmac.new(from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest() + return bip32_serialize((vbytes, 0, b'\x00'*4, 0, I[32:], I[:32]+b'\x01')) + + +def bip32_bin_extract_key(data): + return bip32_deserialize(data)[-1] + + +def bip32_extract_key(data): + return safe_hexlify(bip32_deserialize(data)[-1]) + +# Exploits the same vulnerability as above in Electrum wallets +# Takes a BIP32 pubkey and one of the child privkeys of its corresponding +# privkey and returns the BIP32 privkey associated with that pubkey + + +def raw_crack_bip32_privkey(parent_pub, priv): + vbytes, depth, fingerprint, i, chaincode, key = priv + pvbytes, pdepth, pfingerprint, pi, pchaincode, pkey = parent_pub + i = int(i) + + if i >= 2**31: + raise Exception("Can't crack private derivation!") + + I = hmac.new(pchaincode, pkey+encode(i, 256, 4), hashlib.sha512).digest() + + pprivkey = subtract_privkeys(key, I[:32]+b'\x01') + + newvbytes = MAINNET_PRIVATE if vbytes == MAINNET_PUBLIC else TESTNET_PRIVATE + return (newvbytes, pdepth, pfingerprint, pi, pchaincode, pprivkey) + + +def crack_bip32_privkey(parent_pub, priv): + dsppub = bip32_deserialize(parent_pub) + dspriv = bip32_deserialize(priv) + return bip32_serialize(raw_crack_bip32_privkey(dsppub, dspriv)) + + +def coinvault_pub_to_bip32(*args): + if len(args) == 1: + args = args[0].split(' ') + vals = map(int, args[34:]) + I1 = ''.join(map(chr, vals[:33])) + I2 = ''.join(map(chr, vals[35:67])) + return bip32_serialize((MAINNET_PUBLIC, 0, b'\x00'*4, 0, I2, I1)) + + +def coinvault_priv_to_bip32(*args): + if len(args) == 1: + args = args[0].split(' ') + vals = map(int, args[34:]) + I2 = ''.join(map(chr, vals[35:67])) + I3 = ''.join(map(chr, vals[72:104])) + return bip32_serialize((MAINNET_PRIVATE, 0, b'\x00'*4, 0, 
I2, I3+b'\x01')) + + +def bip32_descend(*args): + if len(args) == 2 and isinstance(args[1], list): + key, path = args + else: + key, path = args[0], map(int, args[1:]) + for p in path: + key = bip32_ckd(key, p) + return bip32_extract_key(key) diff --git a/src/lib/pybitcointools/bitcoin/english.txt b/src/lib/pybitcointools/bitcoin/english.txt new file mode 100644 index 00000000..942040ed --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/english.txt @@ -0,0 +1,2048 @@ +abandon +ability +able +about +above +absent +absorb +abstract +absurd +abuse +access +accident +account +accuse +achieve +acid +acoustic +acquire +across +act +action +actor +actress +actual +adapt +add +addict +address +adjust +admit +adult +advance +advice +aerobic +affair +afford +afraid +again +age +agent +agree +ahead +aim +air +airport +aisle +alarm +album +alcohol +alert +alien +all +alley +allow +almost +alone +alpha +already +also +alter +always +amateur +amazing +among +amount +amused +analyst +anchor +ancient +anger +angle +angry +animal +ankle +announce +annual +another +answer +antenna +antique +anxiety +any +apart +apology +appear +apple +approve +april +arch +arctic +area +arena +argue +arm +armed +armor +army +around +arrange +arrest +arrive +arrow +art +artefact +artist +artwork +ask +aspect +assault +asset +assist +assume +asthma +athlete +atom +attack +attend +attitude +attract +auction +audit +august +aunt +author +auto +autumn +average +avocado +avoid +awake +aware +away +awesome +awful +awkward +axis +baby +bachelor +bacon +badge +bag +balance +balcony +ball +bamboo +banana +banner +bar +barely +bargain +barrel +base +basic +basket +battle +beach +bean +beauty +because +become +beef +before +begin +behave +behind +believe +below +belt +bench +benefit +best +betray +better +between +beyond +bicycle +bid +bike +bind +biology +bird +birth +bitter +black +blade +blame +blanket +blast +bleak +bless +blind +blood +blossom +blouse +blue +blur +blush +board +boat +body +boil +bomb +bone +bonus +book +boost +border +boring +borrow +boss +bottom +bounce +box +boy +bracket +brain +brand +brass +brave +bread +breeze +brick +bridge +brief +bright +bring +brisk +broccoli +broken +bronze +broom +brother +brown +brush +bubble +buddy +budget +buffalo +build +bulb +bulk +bullet +bundle +bunker +burden +burger +burst +bus +business +busy +butter +buyer +buzz +cabbage +cabin +cable +cactus +cage +cake +call +calm +camera +camp +can +canal +cancel +candy +cannon +canoe +canvas +canyon +capable +capital +captain +car +carbon +card +cargo +carpet +carry +cart +case +cash +casino +castle +casual +cat +catalog +catch +category +cattle +caught +cause +caution +cave +ceiling +celery +cement +census +century +cereal +certain +chair +chalk +champion +change +chaos +chapter +charge +chase +chat +cheap +check +cheese +chef +cherry +chest +chicken +chief +child +chimney +choice +choose +chronic +chuckle +chunk +churn +cigar +cinnamon +circle +citizen +city +civil +claim +clap +clarify +claw +clay +clean +clerk +clever +click +client +cliff +climb +clinic +clip +clock +clog +close +cloth +cloud +clown +club +clump +cluster +clutch +coach +coast +coconut +code +coffee +coil +coin +collect +color +column +combine +come +comfort +comic +common +company +concert +conduct +confirm +congress +connect +consider +control +convince +cook +cool +copper +copy +coral +core +corn +correct +cost +cotton +couch +country +couple +course +cousin +cover +coyote +crack +cradle +craft +cram +crane +crash +crater +crawl +crazy +cream +credit +creek +crew 
+cricket +crime +crisp +critic +crop +cross +crouch +crowd +crucial +cruel +cruise +crumble +crunch +crush +cry +crystal +cube +culture +cup +cupboard +curious +current +curtain +curve +cushion +custom +cute +cycle +dad +damage +damp +dance +danger +daring +dash +daughter +dawn +day +deal +debate +debris +decade +december +decide +decline +decorate +decrease +deer +defense +define +defy +degree +delay +deliver +demand +demise +denial +dentist +deny +depart +depend +deposit +depth +deputy +derive +describe +desert +design +desk +despair +destroy +detail +detect +develop +device +devote +diagram +dial +diamond +diary +dice +diesel +diet +differ +digital +dignity +dilemma +dinner +dinosaur +direct +dirt +disagree +discover +disease +dish +dismiss +disorder +display +distance +divert +divide +divorce +dizzy +doctor +document +dog +doll +dolphin +domain +donate +donkey +donor +door +dose +double +dove +draft +dragon +drama +drastic +draw +dream +dress +drift +drill +drink +drip +drive +drop +drum +dry +duck +dumb +dune +during +dust +dutch +duty +dwarf +dynamic +eager +eagle +early +earn +earth +easily +east +easy +echo +ecology +economy +edge +edit +educate +effort +egg +eight +either +elbow +elder +electric +elegant +element +elephant +elevator +elite +else +embark +embody +embrace +emerge +emotion +employ +empower +empty +enable +enact +end +endless +endorse +enemy +energy +enforce +engage +engine +enhance +enjoy +enlist +enough +enrich +enroll +ensure +enter +entire +entry +envelope +episode +equal +equip +era +erase +erode +erosion +error +erupt +escape +essay +essence +estate +eternal +ethics +evidence +evil +evoke +evolve +exact +example +excess +exchange +excite +exclude +excuse +execute +exercise +exhaust +exhibit +exile +exist +exit +exotic +expand +expect +expire +explain +expose +express +extend +extra +eye +eyebrow +fabric +face +faculty +fade +faint +faith +fall +false +fame +family +famous +fan +fancy +fantasy +farm +fashion +fat +fatal +father +fatigue +fault +favorite +feature +february +federal +fee +feed +feel +female +fence +festival +fetch +fever +few +fiber +fiction +field +figure +file +film +filter +final +find +fine +finger +finish +fire +firm +first +fiscal +fish +fit +fitness +fix +flag +flame +flash +flat +flavor +flee +flight +flip +float +flock +floor +flower +fluid +flush +fly +foam +focus +fog +foil +fold +follow +food +foot +force +forest +forget +fork +fortune +forum +forward +fossil +foster +found +fox +fragile +frame +frequent +fresh +friend +fringe +frog +front +frost +frown +frozen +fruit +fuel +fun +funny +furnace +fury +future +gadget +gain +galaxy +gallery +game +gap +garage +garbage +garden +garlic +garment +gas +gasp +gate +gather +gauge +gaze +general +genius +genre +gentle +genuine +gesture +ghost +giant +gift +giggle +ginger +giraffe +girl +give +glad +glance +glare +glass +glide +glimpse +globe +gloom +glory +glove +glow +glue +goat +goddess +gold +good +goose +gorilla +gospel +gossip +govern +gown +grab +grace +grain +grant +grape +grass +gravity +great +green +grid +grief +grit +grocery +group +grow +grunt +guard +guess +guide +guilt +guitar +gun +gym +habit +hair +half +hammer +hamster +hand +happy +harbor +hard +harsh +harvest +hat +have +hawk +hazard +head +health +heart +heavy +hedgehog +height +hello +helmet +help +hen +hero +hidden +high +hill +hint +hip +hire +history +hobby +hockey +hold +hole +holiday +hollow +home +honey +hood +hope +horn +horror +horse +hospital +host +hotel +hour +hover +hub +huge +human +humble +humor +hundred 
+hungry +hunt +hurdle +hurry +hurt +husband +hybrid +ice +icon +idea +identify +idle +ignore +ill +illegal +illness +image +imitate +immense +immune +impact +impose +improve +impulse +inch +include +income +increase +index +indicate +indoor +industry +infant +inflict +inform +inhale +inherit +initial +inject +injury +inmate +inner +innocent +input +inquiry +insane +insect +inside +inspire +install +intact +interest +into +invest +invite +involve +iron +island +isolate +issue +item +ivory +jacket +jaguar +jar +jazz +jealous +jeans +jelly +jewel +job +join +joke +journey +joy +judge +juice +jump +jungle +junior +junk +just +kangaroo +keen +keep +ketchup +key +kick +kid +kidney +kind +kingdom +kiss +kit +kitchen +kite +kitten +kiwi +knee +knife +knock +know +lab +label +labor +ladder +lady +lake +lamp +language +laptop +large +later +latin +laugh +laundry +lava +law +lawn +lawsuit +layer +lazy +leader +leaf +learn +leave +lecture +left +leg +legal +legend +leisure +lemon +lend +length +lens +leopard +lesson +letter +level +liar +liberty +library +license +life +lift +light +like +limb +limit +link +lion +liquid +list +little +live +lizard +load +loan +lobster +local +lock +logic +lonely +long +loop +lottery +loud +lounge +love +loyal +lucky +luggage +lumber +lunar +lunch +luxury +lyrics +machine +mad +magic +magnet +maid +mail +main +major +make +mammal +man +manage +mandate +mango +mansion +manual +maple +marble +march +margin +marine +market +marriage +mask +mass +master +match +material +math +matrix +matter +maximum +maze +meadow +mean +measure +meat +mechanic +medal +media +melody +melt +member +memory +mention +menu +mercy +merge +merit +merry +mesh +message +metal +method +middle +midnight +milk +million +mimic +mind +minimum +minor +minute +miracle +mirror +misery +miss +mistake +mix +mixed +mixture +mobile +model +modify +mom +moment +monitor +monkey +monster +month +moon +moral +more +morning +mosquito +mother +motion +motor +mountain +mouse +move +movie +much +muffin +mule +multiply +muscle +museum +mushroom +music +must +mutual +myself +mystery +myth +naive +name +napkin +narrow +nasty +nation +nature +near +neck +need +negative +neglect +neither +nephew +nerve +nest +net +network +neutral +never +news +next +nice +night +noble +noise +nominee +noodle +normal +north +nose +notable +note +nothing +notice +novel +now +nuclear +number +nurse +nut +oak +obey +object +oblige +obscure +observe +obtain +obvious +occur +ocean +october +odor +off +offer +office +often +oil +okay +old +olive +olympic +omit +once +one +onion +online +only +open +opera +opinion +oppose +option +orange +orbit +orchard +order +ordinary +organ +orient +original +orphan +ostrich +other +outdoor +outer +output +outside +oval +oven +over +own +owner +oxygen +oyster +ozone +pact +paddle +page +pair +palace +palm +panda +panel +panic +panther +paper +parade +parent +park +parrot +party +pass +patch +path +patient +patrol +pattern +pause +pave +payment +peace +peanut +pear +peasant +pelican +pen +penalty +pencil +people +pepper +perfect +permit +person +pet +phone +photo +phrase +physical +piano +picnic +picture +piece +pig +pigeon +pill +pilot +pink +pioneer +pipe +pistol +pitch +pizza +place +planet +plastic +plate +play +please +pledge +pluck +plug +plunge +poem +poet +point +polar +pole +police +pond +pony +pool +popular +portion +position +possible +post +potato +pottery +poverty +powder +power +practice +praise +predict +prefer +prepare +present +pretty +prevent +price +pride +primary +print +priority +prison 
+private +prize +problem +process +produce +profit +program +project +promote +proof +property +prosper +protect +proud +provide +public +pudding +pull +pulp +pulse +pumpkin +punch +pupil +puppy +purchase +purity +purpose +purse +push +put +puzzle +pyramid +quality +quantum +quarter +question +quick +quit +quiz +quote +rabbit +raccoon +race +rack +radar +radio +rail +rain +raise +rally +ramp +ranch +random +range +rapid +rare +rate +rather +raven +raw +razor +ready +real +reason +rebel +rebuild +recall +receive +recipe +record +recycle +reduce +reflect +reform +refuse +region +regret +regular +reject +relax +release +relief +rely +remain +remember +remind +remove +render +renew +rent +reopen +repair +repeat +replace +report +require +rescue +resemble +resist +resource +response +result +retire +retreat +return +reunion +reveal +review +reward +rhythm +rib +ribbon +rice +rich +ride +ridge +rifle +right +rigid +ring +riot +ripple +risk +ritual +rival +river +road +roast +robot +robust +rocket +romance +roof +rookie +room +rose +rotate +rough +round +route +royal +rubber +rude +rug +rule +run +runway +rural +sad +saddle +sadness +safe +sail +salad +salmon +salon +salt +salute +same +sample +sand +satisfy +satoshi +sauce +sausage +save +say +scale +scan +scare +scatter +scene +scheme +school +science +scissors +scorpion +scout +scrap +screen +script +scrub +sea +search +season +seat +second +secret +section +security +seed +seek +segment +select +sell +seminar +senior +sense +sentence +series +service +session +settle +setup +seven +shadow +shaft +shallow +share +shed +shell +sheriff +shield +shift +shine +ship +shiver +shock +shoe +shoot +shop +short +shoulder +shove +shrimp +shrug +shuffle +shy +sibling +sick +side +siege +sight +sign +silent +silk +silly +silver +similar +simple +since +sing +siren +sister +situate +six +size +skate +sketch +ski +skill +skin +skirt +skull +slab +slam +sleep +slender +slice +slide +slight +slim +slogan +slot +slow +slush +small +smart +smile +smoke +smooth +snack +snake +snap +sniff +snow +soap +soccer +social +sock +soda +soft +solar +soldier +solid +solution +solve +someone +song +soon +sorry +sort +soul +sound +soup +source +south +space +spare +spatial +spawn +speak +special +speed +spell +spend +sphere +spice +spider +spike +spin +spirit +split +spoil +sponsor +spoon +sport +spot +spray +spread +spring +spy +square +squeeze +squirrel +stable +stadium +staff +stage +stairs +stamp +stand +start +state +stay +steak +steel +stem +step +stereo +stick +still +sting +stock +stomach +stone +stool +story +stove +strategy +street +strike +strong +struggle +student +stuff +stumble +style +subject +submit +subway +success +such +sudden +suffer +sugar +suggest +suit +summer +sun +sunny +sunset +super +supply +supreme +sure +surface +surge +surprise +surround +survey +suspect +sustain +swallow +swamp +swap +swarm +swear +sweet +swift +swim +swing +switch +sword +symbol +symptom +syrup +system +table +tackle +tag +tail +talent +talk +tank +tape +target +task +taste +tattoo +taxi +teach +team +tell +ten +tenant +tennis +tent +term +test +text +thank +that +theme +then +theory +there +they +thing +this +thought +three +thrive +throw +thumb +thunder +ticket +tide +tiger +tilt +timber +time +tiny +tip +tired +tissue +title +toast +tobacco +today +toddler +toe +together +toilet +token +tomato +tomorrow +tone +tongue +tonight +tool +tooth +top +topic +topple +torch +tornado +tortoise +toss +total +tourist +toward +tower +town +toy +track +trade +traffic +tragic +train 
+transfer +trap +trash +travel +tray +treat +tree +trend +trial +tribe +trick +trigger +trim +trip +trophy +trouble +truck +true +truly +trumpet +trust +truth +try +tube +tuition +tumble +tuna +tunnel +turkey +turn +turtle +twelve +twenty +twice +twin +twist +two +type +typical +ugly +umbrella +unable +unaware +uncle +uncover +under +undo +unfair +unfold +unhappy +uniform +unique +unit +universe +unknown +unlock +until +unusual +unveil +update +upgrade +uphold +upon +upper +upset +urban +urge +usage +use +used +useful +useless +usual +utility +vacant +vacuum +vague +valid +valley +valve +van +vanish +vapor +various +vast +vault +vehicle +velvet +vendor +venture +venue +verb +verify +version +very +vessel +veteran +viable +vibrant +vicious +victory +video +view +village +vintage +violin +virtual +virus +visa +visit +visual +vital +vivid +vocal +voice +void +volcano +volume +vote +voyage +wage +wagon +wait +walk +wall +walnut +want +warfare +warm +warrior +wash +wasp +waste +water +wave +way +wealth +weapon +wear +weasel +weather +web +wedding +weekend +weird +welcome +west +wet +whale +what +wheat +wheel +when +where +whip +whisper +wide +width +wife +wild +will +win +window +wine +wing +wink +winner +winter +wire +wisdom +wise +wish +witness +wolf +woman +wonder +wood +wool +word +work +world +worry +worth +wrap +wreck +wrestle +wrist +write +wrong +yard +year +yellow +you +young +youth +zebra +zero +zone +zoo diff --git a/src/lib/pybitcointools/bitcoin/main.py b/src/lib/pybitcointools/bitcoin/main.py new file mode 100644 index 00000000..8cf3a9f7 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/main.py @@ -0,0 +1,581 @@ +#!/usr/bin/python +from .py2specials import * +from .py3specials import * +import binascii +import hashlib +import re +import sys +import os +import base64 +import time +import random +import hmac +from .ripemd import * + +# Elliptic curve parameters (secp256k1) + +P = 2**256 - 2**32 - 977 +N = 115792089237316195423570985008687907852837564279074904382605163141518161494337 +A = 0 +B = 7 +Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240 +Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424 +G = (Gx, Gy) + + +def change_curve(p, n, a, b, gx, gy): + global P, N, A, B, Gx, Gy, G + P, N, A, B, Gx, Gy = p, n, a, b, gx, gy + G = (Gx, Gy) + + +def getG(): + return G + +# Extended Euclidean Algorithm + + +def inv(a, n): + if a == 0: + return 0 + lm, hm = 1, 0 + low, high = a % n, n + while low > 1: + r = high//low + nm, new = hm-lm*r, high-low*r + lm, low, hm, high = nm, new, lm, low + return lm % n + + + +# JSON access (for pybtctool convenience) + + +def access(obj, prop): + if isinstance(obj, dict): + if prop in obj: + return obj[prop] + elif '.' 
in prop: + return obj[float(prop)] + else: + return obj[int(prop)] + else: + return obj[int(prop)] + + +def multiaccess(obj, prop): + return [access(o, prop) for o in obj] + + +def slice(obj, start=0, end=2**200): + return obj[int(start):int(end)] + + +def count(obj): + return len(obj) + +_sum = sum + + +def sum(obj): + return _sum(obj) + + +def isinf(p): + return p[0] == 0 and p[1] == 0 + + +def to_jacobian(p): + o = (p[0], p[1], 1) + return o + + +def jacobian_double(p): + if not p[1]: + return (0, 0, 0) + ysq = (p[1] ** 2) % P + S = (4 * p[0] * ysq) % P + M = (3 * p[0] ** 2 + A * p[2] ** 4) % P + nx = (M**2 - 2 * S) % P + ny = (M * (S - nx) - 8 * ysq ** 2) % P + nz = (2 * p[1] * p[2]) % P + return (nx, ny, nz) + + +def jacobian_add(p, q): + if not p[1]: + return q + if not q[1]: + return p + U1 = (p[0] * q[2] ** 2) % P + U2 = (q[0] * p[2] ** 2) % P + S1 = (p[1] * q[2] ** 3) % P + S2 = (q[1] * p[2] ** 3) % P + if U1 == U2: + if S1 != S2: + return (0, 0, 1) + return jacobian_double(p) + H = U2 - U1 + R = S2 - S1 + H2 = (H * H) % P + H3 = (H * H2) % P + U1H2 = (U1 * H2) % P + nx = (R ** 2 - H3 - 2 * U1H2) % P + ny = (R * (U1H2 - nx) - S1 * H3) % P + nz = (H * p[2] * q[2]) % P + return (nx, ny, nz) + + +def from_jacobian(p): + z = inv(p[2], P) + return ((p[0] * z**2) % P, (p[1] * z**3) % P) + + +def jacobian_multiply(a, n): + if a[1] == 0 or n == 0: + return (0, 0, 1) + if n == 1: + return a + if n < 0 or n >= N: + return jacobian_multiply(a, n % N) + if (n % 2) == 0: + return jacobian_double(jacobian_multiply(a, n//2)) + if (n % 2) == 1: + return jacobian_add(jacobian_double(jacobian_multiply(a, n//2)), a) + + +def fast_multiply(a, n): + return from_jacobian(jacobian_multiply(to_jacobian(a), n)) + + +def fast_add(a, b): + return from_jacobian(jacobian_add(to_jacobian(a), to_jacobian(b))) + +# Functions for handling pubkey and privkey formats + + +def get_pubkey_format(pub): + if is_python2: + two = '\x02' + three = '\x03' + four = '\x04' + else: + two = 2 + three = 3 + four = 4 + + if isinstance(pub, (tuple, list)): return 'decimal' + elif len(pub) == 65 and pub[0] == four: return 'bin' + elif len(pub) == 130 and pub[0:2] == '04': return 'hex' + elif len(pub) == 33 and pub[0] in [two, three]: return 'bin_compressed' + elif len(pub) == 66 and pub[0:2] in ['02', '03']: return 'hex_compressed' + elif len(pub) == 64: return 'bin_electrum' + elif len(pub) == 128: return 'hex_electrum' + else: raise Exception("Pubkey not in recognized format") + + +def encode_pubkey(pub, formt): + if not isinstance(pub, (tuple, list)): + pub = decode_pubkey(pub) + if formt == 'decimal': return pub + elif formt == 'bin': return b'\x04' + encode(pub[0], 256, 32) + encode(pub[1], 256, 32) + elif formt == 'bin_compressed': + return from_int_to_byte(2+(pub[1] % 2)) + encode(pub[0], 256, 32) + elif formt == 'hex': return '04' + encode(pub[0], 16, 64) + encode(pub[1], 16, 64) + elif formt == 'hex_compressed': + return '0'+str(2+(pub[1] % 2)) + encode(pub[0], 16, 64) + elif formt == 'bin_electrum': return encode(pub[0], 256, 32) + encode(pub[1], 256, 32) + elif formt == 'hex_electrum': return encode(pub[0], 16, 64) + encode(pub[1], 16, 64) + else: raise Exception("Invalid format!") + + +def decode_pubkey(pub, formt=None): + if not formt: formt = get_pubkey_format(pub) + if formt == 'decimal': return pub + elif formt == 'bin': return (decode(pub[1:33], 256), decode(pub[33:65], 256)) + elif formt == 'bin_compressed': + x = decode(pub[1:33], 256) + beta = pow(int(x*x*x+A*x+B), int((P+1)//4), int(P)) + y = (P-beta) if 
((beta + from_byte_to_int(pub[0])) % 2) else beta + return (x, y) + elif formt == 'hex': return (decode(pub[2:66], 16), decode(pub[66:130], 16)) + elif formt == 'hex_compressed': + return decode_pubkey(safe_from_hex(pub), 'bin_compressed') + elif formt == 'bin_electrum': + return (decode(pub[:32], 256), decode(pub[32:64], 256)) + elif formt == 'hex_electrum': + return (decode(pub[:64], 16), decode(pub[64:128], 16)) + else: raise Exception("Invalid format!") + +def get_privkey_format(priv): + if isinstance(priv, int_types): return 'decimal' + elif len(priv) == 32: return 'bin' + elif len(priv) == 33: return 'bin_compressed' + elif len(priv) == 64: return 'hex' + elif len(priv) == 66: return 'hex_compressed' + else: + bin_p = b58check_to_bin(priv) + if len(bin_p) == 32: return 'wif' + elif len(bin_p) == 33: return 'wif_compressed' + else: raise Exception("WIF does not represent privkey") + +def encode_privkey(priv, formt, vbyte=0): + if not isinstance(priv, int_types): + return encode_privkey(decode_privkey(priv), formt, vbyte) + if formt == 'decimal': return priv + elif formt == 'bin': return encode(priv, 256, 32) + elif formt == 'bin_compressed': return encode(priv, 256, 32)+b'\x01' + elif formt == 'hex': return encode(priv, 16, 64) + elif formt == 'hex_compressed': return encode(priv, 16, 64)+'01' + elif formt == 'wif': + return bin_to_b58check(encode(priv, 256, 32), 128+int(vbyte)) + elif formt == 'wif_compressed': + return bin_to_b58check(encode(priv, 256, 32)+b'\x01', 128+int(vbyte)) + else: raise Exception("Invalid format!") + +def decode_privkey(priv,formt=None): + if not formt: formt = get_privkey_format(priv) + if formt == 'decimal': return priv + elif formt == 'bin': return decode(priv, 256) + elif formt == 'bin_compressed': return decode(priv[:32], 256) + elif formt == 'hex': return decode(priv, 16) + elif formt == 'hex_compressed': return decode(priv[:64], 16) + elif formt == 'wif': return decode(b58check_to_bin(priv),256) + elif formt == 'wif_compressed': + return decode(b58check_to_bin(priv)[:32],256) + else: raise Exception("WIF does not represent privkey") + +def add_pubkeys(p1, p2): + f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2) + return encode_pubkey(fast_add(decode_pubkey(p1, f1), decode_pubkey(p2, f2)), f1) + +def add_privkeys(p1, p2): + f1, f2 = get_privkey_format(p1), get_privkey_format(p2) + return encode_privkey((decode_privkey(p1, f1) + decode_privkey(p2, f2)) % N, f1) + +def mul_privkeys(p1, p2): + f1, f2 = get_privkey_format(p1), get_privkey_format(p2) + return encode_privkey((decode_privkey(p1, f1) * decode_privkey(p2, f2)) % N, f1) + +def multiply(pubkey, privkey): + f1, f2 = get_pubkey_format(pubkey), get_privkey_format(privkey) + pubkey, privkey = decode_pubkey(pubkey, f1), decode_privkey(privkey, f2) + # http://safecurves.cr.yp.to/twist.html + if not isinf(pubkey) and (pubkey[0]**3+B-pubkey[1]*pubkey[1]) % P != 0: + raise Exception("Point not on curve") + return encode_pubkey(fast_multiply(pubkey, privkey), f1) + + +def divide(pubkey, privkey): + factor = inv(decode_privkey(privkey), N) + return multiply(pubkey, factor) + + +def compress(pubkey): + f = get_pubkey_format(pubkey) + if 'compressed' in f: return pubkey + elif f == 'bin': return encode_pubkey(decode_pubkey(pubkey, f), 'bin_compressed') + elif f == 'hex' or f == 'decimal': + return encode_pubkey(decode_pubkey(pubkey, f), 'hex_compressed') + + +def decompress(pubkey): + f = get_pubkey_format(pubkey) + if 'compressed' not in f: return pubkey + elif f == 'bin_compressed': return 
encode_pubkey(decode_pubkey(pubkey, f), 'bin') + elif f == 'hex_compressed' or f == 'decimal': + return encode_pubkey(decode_pubkey(pubkey, f), 'hex') + + +def privkey_to_pubkey(privkey): + f = get_privkey_format(privkey) + privkey = decode_privkey(privkey, f) + if privkey >= N: + raise Exception("Invalid privkey") + if f in ['bin', 'bin_compressed', 'hex', 'hex_compressed', 'decimal']: + return encode_pubkey(fast_multiply(G, privkey), f) + else: + return encode_pubkey(fast_multiply(G, privkey), f.replace('wif', 'hex')) + +privtopub = privkey_to_pubkey + + +def privkey_to_address(priv, magicbyte=0): + return pubkey_to_address(privkey_to_pubkey(priv), magicbyte) +privtoaddr = privkey_to_address + + +def neg_pubkey(pubkey): + f = get_pubkey_format(pubkey) + pubkey = decode_pubkey(pubkey, f) + return encode_pubkey((pubkey[0], (P-pubkey[1]) % P), f) + + +def neg_privkey(privkey): + f = get_privkey_format(privkey) + privkey = decode_privkey(privkey, f) + return encode_privkey((N - privkey) % N, f) + +def subtract_pubkeys(p1, p2): + f1, f2 = get_pubkey_format(p1), get_pubkey_format(p2) + k2 = decode_pubkey(p2, f2) + return encode_pubkey(fast_add(decode_pubkey(p1, f1), (k2[0], (P - k2[1]) % P)), f1) + + +def subtract_privkeys(p1, p2): + f1, f2 = get_privkey_format(p1), get_privkey_format(p2) + k2 = decode_privkey(p2, f2) + return encode_privkey((decode_privkey(p1, f1) - k2) % N, f1) + +# Hashes + + +def bin_hash160(string): + intermed = hashlib.sha256(string).digest() + digest = '' + try: + digest = hashlib.new('ripemd160', intermed).digest() + except: + digest = RIPEMD160(intermed).digest() + return digest + + +def hash160(string): + return safe_hexlify(bin_hash160(string)) + + +def bin_sha256(string): + binary_data = string if isinstance(string, bytes) else bytes(string, 'utf-8') + return hashlib.sha256(binary_data).digest() + +def sha256(string): + return bytes_to_hex_string(bin_sha256(string)) + + +def bin_ripemd160(string): + try: + digest = hashlib.new('ripemd160', string).digest() + except: + digest = RIPEMD160(string).digest() + return digest + + +def ripemd160(string): + return safe_hexlify(bin_ripemd160(string)) + + +def bin_dbl_sha256(s): + bytes_to_hash = from_string_to_bytes(s) + return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest() + + +def dbl_sha256(string): + return safe_hexlify(bin_dbl_sha256(string)) + + +def bin_slowsha(string): + string = from_string_to_bytes(string) + orig_input = string + for i in range(100000): + string = hashlib.sha256(string + orig_input).digest() + return string + + +def slowsha(string): + return safe_hexlify(bin_slowsha(string)) + + +def hash_to_int(x): + if len(x) in [40, 64]: + return decode(x, 16) + return decode(x, 256) + + +def num_to_var_int(x): + x = int(x) + if x < 253: return from_int_to_byte(x) + elif x < 65536: return from_int_to_byte(253)+encode(x, 256, 2)[::-1] + elif x < 4294967296: return from_int_to_byte(254) + encode(x, 256, 4)[::-1] + else: return from_int_to_byte(255) + encode(x, 256, 8)[::-1] + + +# WTF, Electrum? +def electrum_sig_hash(message): + padded = b"\x18Bitcoin Signed Message:\n" + num_to_var_int(len(message)) + from_string_to_bytes(message) + return bin_dbl_sha256(padded) + + +def random_key(): + # Gotta be secure after that java.SecureRandom fiasco... 
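+    # random_string is os.urandom under the hood (see py2specials/py3specials);
+    # folding in the PRNG and clock can only add entropy on top of that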
+    entropy = random_string(32) \
+        + str(random.randrange(2**256)) \
+        + str(int(time.time() * 1000000))
+    return sha256(entropy)
+
+
+def random_electrum_seed():
+    entropy = os.urandom(32) \
+        + str(random.randrange(2**256)) \
+        + str(int(time.time() * 1000000))
+    return sha256(entropy)[:32]
+
+# Encodings
+
+def b58check_to_bin(inp):
+    leadingzbytes = len(re.match('^1*', inp).group(0))
+    data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
+    assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
+    return data[1:-4]
+
+
+def get_version_byte(inp):
+    leadingzbytes = len(re.match('^1*', inp).group(0))
+    data = b'\x00' * leadingzbytes + changebase(inp, 58, 256)
+    assert bin_dbl_sha256(data[:-4])[:4] == data[-4:]
+    return ord(data[0])
+
+
+def hex_to_b58check(inp, magicbyte=0):
+    return bin_to_b58check(binascii.unhexlify(inp), magicbyte)
+
+
+def b58check_to_hex(inp):
+    return safe_hexlify(b58check_to_bin(inp))
+
+
+def pubkey_to_address(pubkey, magicbyte=0):
+    if isinstance(pubkey, (list, tuple)):
+        pubkey = encode_pubkey(pubkey, 'bin')
+    if len(pubkey) in [66, 130]:
+        return bin_to_b58check(
+            bin_hash160(binascii.unhexlify(pubkey)), magicbyte)
+    return bin_to_b58check(bin_hash160(pubkey), magicbyte)
+
+pubtoaddr = pubkey_to_address
+
+
+def is_privkey(priv):
+    try:
+        get_privkey_format(priv)
+        return True
+    except:
+        return False
+
+def is_pubkey(pubkey):
+    try:
+        get_pubkey_format(pubkey)
+        return True
+    except:
+        return False
+
+def is_address(addr):
+    ADDR_RE = re.compile("^[123mn][a-km-zA-HJ-NP-Z0-9]{26,33}$")
+    return bool(ADDR_RE.match(addr))
+
+
+# ECDSA
+
+
+def encode_sig(v, r, s):
+    vb, rb, sb = from_int_to_byte(v), encode(r, 256), encode(s, 256)
+
+    result = base64.b64encode(vb+b'\x00'*(32-len(rb))+rb+b'\x00'*(32-len(sb))+sb)
+    return result if is_python2 else str(result, 'utf-8')
+
+
+def decode_sig(sig):
+    bytez = base64.b64decode(sig)
+    return from_byte_to_int(bytez[0]), decode(bytez[1:33], 256), decode(bytez[33:], 256)
+
+# https://tools.ietf.org/html/rfc6979#section-3.2
+
+
+def deterministic_generate_k(msghash, priv):
+    v = b'\x01' * 32
+    k = b'\x00' * 32
+    priv = encode_privkey(priv, 'bin')
+    msghash = encode(hash_to_int(msghash), 256, 32)
+    k = hmac.new(k, v+b'\x00'+priv+msghash, hashlib.sha256).digest()
+    v = hmac.new(k, v, hashlib.sha256).digest()
+    k = hmac.new(k, v+b'\x01'+priv+msghash, hashlib.sha256).digest()
+    v = hmac.new(k, v, hashlib.sha256).digest()
+    return decode(hmac.new(k, v, hashlib.sha256).digest(), 256)
+
+
+def ecdsa_raw_sign(msghash, priv):
+
+    z = hash_to_int(msghash)
+    k = deterministic_generate_k(msghash, priv)
+
+    r, y = fast_multiply(G, k)
+    s = inv(k, N) * (z + r*decode_privkey(priv)) % N
+
+    v, r, s = 27+((y % 2) ^ (0 if s * 2 < N else 1)), r, s if s * 2 < N else N - s
+    if 'compressed' in get_privkey_format(priv):
+        v += 4
+    return v, r, s
+
+
+def ecdsa_sign(msg, priv):
+    v, r, s = ecdsa_raw_sign(electrum_sig_hash(msg), priv)
+    sig = encode_sig(v, r, s)
+    assert ecdsa_verify(msg, sig,
+        privtopub(priv)), "Bad Sig!\t %s\nv = %d\nr = %d\ns = %d" % (sig, v, r, s)
+    return sig
+
+
+def ecdsa_raw_verify(msghash, vrs, pub):
+    v, r, s = vrs
+    if not (27 <= v <= 34):
+        return False
+
+    w = inv(s, N)
+    z = hash_to_int(msghash)
+
+    u1, u2 = z*w % N, r*w % N
+    x, y = fast_add(fast_multiply(G, u1), fast_multiply(decode_pubkey(pub), u2))
+    return bool(r == x and (r % N) and (s % N))
+
+
+# For BitcoinCore, (msg = addr or msg = "") by default
+def ecdsa_verify_addr(msg, sig, addr):
+    assert is_address(addr)
+    Q = ecdsa_recover(msg, sig)
+    magic = get_version_byte(addr)
+    return (addr == pubtoaddr(Q, int(magic))) or (addr == pubtoaddr(compress(Q), int(magic)))
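+
+
+# Usage sketch for the signing helpers above (illustrative only, never called
+# at import; the key is derived from a throwaway passphrase, not a real wallet):
+def _example_sign_verify():
+    priv = sha256('a throwaway brainwallet passphrase')  # 64-char hex privkey
+    pub = privtopub(priv)
+    sig = ecdsa_sign('test message', priv)
+    assert ecdsa_verify('test message', sig, pub)
+    # Verification also accepts a base58 address, via pubkey recovery:
+    assert ecdsa_verify('test message', sig, pubtoaddr(pub))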
+def ecdsa_verify(msg, sig, pub):
+    if is_address(pub):
+        return ecdsa_verify_addr(msg, sig, pub)
+    return ecdsa_raw_verify(electrum_sig_hash(msg), decode_sig(sig), pub)
+
+
+def ecdsa_raw_recover(msghash, vrs):
+    v, r, s = vrs
+    if not (27 <= v <= 34):
+        raise ValueError("%d must be in range 27-34" % v)
+    x = r
+    xcubedaxb = (x*x*x+A*x+B) % P
+    beta = pow(xcubedaxb, (P+1)//4, P)
+    y = beta if v % 2 ^ beta % 2 else (P - beta)
+    # If xcubedaxb is not a quadratic residue, then r cannot be the x coord
+    # for a point on the curve, and so the sig is invalid
+    if (xcubedaxb - y*y) % P != 0 or not (r % N) or not (s % N):
+        return False
+    z = hash_to_int(msghash)
+    Gz = jacobian_multiply((Gx, Gy, 1), (N - z) % N)
+    XY = jacobian_multiply((x, y, 1), s)
+    Qr = jacobian_add(Gz, XY)
+    Q = jacobian_multiply(Qr, inv(r, N))
+    Q = from_jacobian(Q)
+
+    # if ecdsa_raw_verify(msghash, vrs, Q):
+    return Q
+    # return False
+
+
+def ecdsa_recover(msg, sig):
+    v,r,s = decode_sig(sig)
+    Q = ecdsa_raw_recover(electrum_sig_hash(msg), (v,r,s))
+    return encode_pubkey(Q, 'hex_compressed') if v >= 31 else encode_pubkey(Q, 'hex')
diff --git a/src/lib/pybitcointools/bitcoin/mnemonic.py b/src/lib/pybitcointools/bitcoin/mnemonic.py
new file mode 100644
index 00000000..a9df3617
--- /dev/null
+++ b/src/lib/pybitcointools/bitcoin/mnemonic.py
@@ -0,0 +1,127 @@
+import hashlib
+import os.path
+import binascii
+import random
+from bisect import bisect_left
+
+# Strip the trailing newline from each wordlist entry so that index() and
+# bisect lookups below match the stripped words
+wordlist_english=[w.strip() for w in open(os.path.join(os.path.dirname(os.path.realpath(__file__)),'english.txt'),'r')]
+
+def eint_to_bytes(entint,entbits):
+    a=hex(entint)[2:].rstrip('L').zfill(entbits//4)
+    return binascii.unhexlify(a)
+
+def mnemonic_int_to_words(mint,mint_num_words,wordlist=wordlist_english):
+    backwords=[wordlist[(mint >> (11*x)) & 0x7FF].strip() for x in range(mint_num_words)]
+    return backwords[::-1]
+
+def entropy_cs(entbytes):
+    entropy_size=8*len(entbytes)
+    checksum_size=entropy_size//32
+    hd=hashlib.sha256(entbytes).hexdigest()
+    csint=int(hd,16) >> (256-checksum_size)
+    return csint,checksum_size
+
+def entropy_to_words(entbytes,wordlist=wordlist_english):
+    if(len(entbytes) < 4 or len(entbytes) % 4 != 0):
+        raise ValueError("The size of the entropy must be a multiple of 4 bytes (multiple of 32 bits)")
+    entropy_size=8*len(entbytes)
+    csint,checksum_size = entropy_cs(entbytes)
+    entint=int(binascii.hexlify(entbytes),16)
+    mint=(entint << checksum_size) | csint
+    mint_num_words=(entropy_size+checksum_size)//11
+
+    return mnemonic_int_to_words(mint,mint_num_words,wordlist)
+
+def words_bisect(word,wordlist=wordlist_english):
+    lo=bisect_left(wordlist,word)
+    hi=len(wordlist)-bisect_left(wordlist[:lo:-1],word)
+
+    return lo,hi
+
+def words_split(wordstr,wordlist=wordlist_english):
+    def popword(wordstr,wordlist):
+        for fwl in range(1,9):
+            w=wordstr[:fwl].strip()
+            lo,hi=words_bisect(w,wordlist)
+            if(hi-lo == 1):
+                return w,wordstr[fwl:].lstrip()
+            wordlist=wordlist[lo:hi]
+        raise Exception("Wordstr %s not found in list" %(w))
+
+    words=[]
+    tail=wordstr
+    while(len(tail)):
+        head,tail=popword(tail,wordlist)
+        words.append(head)
+    return words
+
+def words_to_mnemonic_int(words,wordlist=wordlist_english):
+    if(isinstance(words,str)):
+        words=words_split(words,wordlist)
+    return sum([wordlist.index(w) << (11*x) for x,w in enumerate(words[::-1])])
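+
+# Sanity sketch against the first BIP39 English test vector (not called at
+# import; all-zero entropy must encode to "abandon ... abandon about"):
+def _example_entropy_to_words():
+    ent = binascii.unhexlify('0' * 32)   # 16 zero bytes = 128 bits
+    words = entropy_to_words(ent)        # 128 + 4 checksum bits -> 12 words
+    assert len(words) == 12
+    assert words[0] == 'abandon' and words[-1] == 'about'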
+def words_verify(words,wordlist=wordlist_english):
+    if(isinstance(words,str)):
+        words=words_split(words,wordlist)
+
+    mint = words_to_mnemonic_int(words,wordlist)
+    mint_bits=len(words)*11
+    cs_bits=mint_bits//32
+    entropy_bits=mint_bits-cs_bits
+    eint=mint >> cs_bits
+    csint=mint & ((1 << cs_bits)-1)
+    ebytes=eint_to_bytes(eint,entropy_bits)
+    # entropy_cs returns (csint, checksum_size); compare checksum ints only
+    return csint == entropy_cs(ebytes)[0]
+
+def mnemonic_to_seed(mnemonic_phrase,passphrase=b''):
+    try:
+        from hashlib import pbkdf2_hmac
+        def pbkdf2_hmac_sha512(password,salt,iters=2048):
+            return pbkdf2_hmac(hash_name='sha512',password=password,salt=salt,iterations=iters)
+    except:
+        try:
+            from Crypto.Protocol.KDF import PBKDF2
+            from Crypto.Hash import SHA512,HMAC
+
+            def pbkdf2_hmac_sha512(password,salt,iters=2048):
+                return PBKDF2(password=password,salt=salt,dkLen=64,count=iters,prf=lambda p,s: HMAC.new(p,s,SHA512).digest())
+        except:
+            try:
+
+                from pbkdf2 import PBKDF2
+                import hmac
+                def pbkdf2_hmac_sha512(password,salt,iters=2048):
+                    return PBKDF2(password,salt, iterations=iters, macmodule=hmac, digestmodule=hashlib.sha512).read(64)
+            except:
+                raise RuntimeError("No implementation of pbkdf2 was found!")
+
+    return pbkdf2_hmac_sha512(password=mnemonic_phrase,salt=b'mnemonic'+passphrase)
+
+def words_mine(prefix,entbits,satisfunction,wordlist=wordlist_english,randombits=random.getrandbits):
+    prefix_bits=len(prefix)*11
+    mine_bits=entbits-prefix_bits
+    pint=words_to_mnemonic_int(prefix,wordlist)
+    pint<<=mine_bits
+    dint=randombits(mine_bits)
+    count=0
+    while(not satisfunction(entropy_to_words(eint_to_bytes(pint+dint,entbits)))):
+        dint=randombits(mine_bits)
+        count+=1
+        if((count & 0xFFFF) == 0):
+            print("Searched %f of the space" % (float(count)/float(1 << mine_bits)))
+
+    return entropy_to_words(eint_to_bytes(pint+dint,entbits))
+
+if __name__=="__main__":
+    import json
+    testvectors=json.load(open('vectors.json','r'))
+    passed=True
+    for v in testvectors['english']:
+        ebytes=binascii.unhexlify(v[0])
+        w=' '.join(entropy_to_words(ebytes))
+        seed=mnemonic_to_seed(w.encode('utf-8'),passphrase=b'TREZOR')
+        passed = passed and w==v[1]
+        passed = passed and binascii.hexlify(seed).decode()==v[2]
+    print("Tests %s." 
% ("Passed" if passed else "Failed")) + + diff --git a/src/lib/pybitcointools/bitcoin/py2specials.py b/src/lib/pybitcointools/bitcoin/py2specials.py new file mode 100644 index 00000000..337154f3 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/py2specials.py @@ -0,0 +1,98 @@ +import sys, re +import binascii +import os +import hashlib + + +if sys.version_info.major == 2: + string_types = (str, unicode) + string_or_bytes_types = string_types + int_types = (int, float, long) + + # Base switching + code_strings = { + 2: '01', + 10: '0123456789', + 16: '0123456789abcdef', + 32: 'abcdefghijklmnopqrstuvwxyz234567', + 58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz', + 256: ''.join([chr(x) for x in range(256)]) + } + + def bin_dbl_sha256(s): + bytes_to_hash = from_string_to_bytes(s) + return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest() + + def lpad(msg, symbol, length): + if len(msg) >= length: + return msg + return symbol * (length - len(msg)) + msg + + def get_code_string(base): + if base in code_strings: + return code_strings[base] + else: + raise ValueError("Invalid base!") + + def changebase(string, frm, to, minlen=0): + if frm == to: + return lpad(string, get_code_string(frm)[0], minlen) + return encode(decode(string, frm), to, minlen) + + def bin_to_b58check(inp, magicbyte=0): + if magicbyte == 0: + inp = '\x00' + inp + while magicbyte > 0: + inp = chr(int(magicbyte % 256)) + inp + magicbyte //= 256 + leadingzbytes = len(re.match('^\x00*', inp).group(0)) + checksum = bin_dbl_sha256(inp)[:4] + return '1' * leadingzbytes + changebase(inp+checksum, 256, 58) + + def bytes_to_hex_string(b): + return b.encode('hex') + + def safe_from_hex(s): + return s.decode('hex') + + def from_int_representation_to_bytes(a): + return str(a) + + def from_int_to_byte(a): + return chr(a) + + def from_byte_to_int(a): + return ord(a) + + def from_bytes_to_string(s): + return s + + def from_string_to_bytes(a): + return a + + def safe_hexlify(a): + return binascii.hexlify(a) + + def encode(val, base, minlen=0): + base, minlen = int(base), int(minlen) + code_string = get_code_string(base) + result = "" + while val > 0: + result = code_string[val % base] + result + val //= base + return code_string[0] * max(minlen - len(result), 0) + result + + def decode(string, base): + base = int(base) + code_string = get_code_string(base) + result = 0 + if base == 16: + string = string.lower() + while len(string) > 0: + result *= base + result += code_string.find(string[0]) + string = string[1:] + return result + + def random_string(x): + return os.urandom(x) diff --git a/src/lib/pybitcointools/bitcoin/py3specials.py b/src/lib/pybitcointools/bitcoin/py3specials.py new file mode 100644 index 00000000..7593b9a6 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/py3specials.py @@ -0,0 +1,123 @@ +import sys, os +import binascii +import hashlib + + +if sys.version_info.major == 3: + string_types = (str) + string_or_bytes_types = (str, bytes) + int_types = (int, float) + # Base switching + code_strings = { + 2: '01', + 10: '0123456789', + 16: '0123456789abcdef', + 32: 'abcdefghijklmnopqrstuvwxyz234567', + 58: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz', + 256: ''.join([chr(x) for x in range(256)]) + } + + def bin_dbl_sha256(s): + bytes_to_hash = from_string_to_bytes(s) + return hashlib.sha256(hashlib.sha256(bytes_to_hash).digest()).digest() + + def lpad(msg, symbol, length): + if len(msg) >= length: + return msg + return symbol * (length - len(msg)) + msg + + def get_code_string(base): 
+ if base in code_strings: + return code_strings[base] + else: + raise ValueError("Invalid base!") + + def changebase(string, frm, to, minlen=0): + if frm == to: + return lpad(string, get_code_string(frm)[0], minlen) + return encode(decode(string, frm), to, minlen) + + def bin_to_b58check(inp, magicbyte=0): + if magicbyte == 0: + inp = from_int_to_byte(0) + inp + while magicbyte > 0: + inp = from_int_to_byte(magicbyte % 256) + inp + magicbyte //= 256 + + leadingzbytes = 0 + for x in inp: + if x != 0: + break + leadingzbytes += 1 + + checksum = bin_dbl_sha256(inp)[:4] + return '1' * leadingzbytes + changebase(inp+checksum, 256, 58) + + def bytes_to_hex_string(b): + if isinstance(b, str): + return b + + return ''.join('{:02x}'.format(y) for y in b) + + def safe_from_hex(s): + return bytes.fromhex(s) + + def from_int_representation_to_bytes(a): + return bytes(str(a), 'utf-8') + + def from_int_to_byte(a): + return bytes([a]) + + def from_byte_to_int(a): + return a + + def from_string_to_bytes(a): + return a if isinstance(a, bytes) else bytes(a, 'utf-8') + + def safe_hexlify(a): + return str(binascii.hexlify(a), 'utf-8') + + def encode(val, base, minlen=0): + base, minlen = int(base), int(minlen) + code_string = get_code_string(base) + result_bytes = bytes() + while val > 0: + curcode = code_string[val % base] + result_bytes = bytes([ord(curcode)]) + result_bytes + val //= base + + pad_size = minlen - len(result_bytes) + + padding_element = b'\x00' if base == 256 else b'1' \ + if base == 58 else b'0' + if (pad_size > 0): + result_bytes = padding_element*pad_size + result_bytes + + result_string = ''.join([chr(y) for y in result_bytes]) + result = result_bytes if base == 256 else result_string + + return result + + def decode(string, base): + if base == 256 and isinstance(string, str): + string = bytes(bytearray.fromhex(string)) + base = int(base) + code_string = get_code_string(base) + result = 0 + if base == 256: + def extract(d, cs): + return d + else: + def extract(d, cs): + return cs.find(d if isinstance(d, str) else chr(d)) + + if base == 16: + string = string.lower() + while len(string) > 0: + result *= base + result += extract(string[0], code_string) + string = string[1:] + return result + + def random_string(x): + return str(os.urandom(x)) diff --git a/src/lib/pybitcointools/bitcoin/ripemd.py b/src/lib/pybitcointools/bitcoin/ripemd.py new file mode 100644 index 00000000..4b0c6045 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/ripemd.py @@ -0,0 +1,414 @@ +## ripemd.py - pure Python implementation of the RIPEMD-160 algorithm. +## Bjorn Edstrom 16 december 2007. +## +## Copyrights +## ========== +## +## This code is a derived from an implementation by Markus Friedl which is +## subject to the following license. This Python implementation is not +## subject to any other license. +## +##/* +## * Copyright (c) 2001 Markus Friedl. All rights reserved. +## * +## * Redistribution and use in source and binary forms, with or without +## * modification, are permitted provided that the following conditions +## * are met: +## * 1. Redistributions of source code must retain the above copyright +## * notice, this list of conditions and the following disclaimer. +## * 2. Redistributions in binary form must reproduce the above copyright +## * notice, this list of conditions and the following disclaimer in the +## * documentation and/or other materials provided with the distribution. 
+## * +## * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +## * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +## * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +## * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +## * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +## * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +## * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +## * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +## * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +## * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +## */ +##/* +## * Preneel, Bosselaers, Dobbertin, "The Cryptographic Hash Function RIPEMD-160", +## * RSA Laboratories, CryptoBytes, Volume 3, Number 2, Autumn 1997, +## * ftp://ftp.rsasecurity.com/pub/cryptobytes/crypto3n2.pdf +## */ + +try: + import psyco + psyco.full() +except ImportError: + pass + +import sys + +is_python2 = sys.version_info.major == 2 +#block_size = 1 +digest_size = 20 +digestsize = 20 + +try: + range = xrange +except: + pass + +class RIPEMD160: + """Return a new RIPEMD160 object. An optional string argument + may be provided; if present, this string will be automatically + hashed.""" + + def __init__(self, arg=None): + self.ctx = RMDContext() + if arg: + self.update(arg) + self.dig = None + + def update(self, arg): + """update(arg)""" + RMD160Update(self.ctx, arg, len(arg)) + self.dig = None + + def digest(self): + """digest()""" + if self.dig: + return self.dig + ctx = self.ctx.copy() + self.dig = RMD160Final(self.ctx) + self.ctx = ctx + return self.dig + + def hexdigest(self): + """hexdigest()""" + dig = self.digest() + hex_digest = '' + for d in dig: + if (is_python2): + hex_digest += '%02x' % ord(d) + else: + hex_digest += '%02x' % d + return hex_digest + + def copy(self): + """copy()""" + import copy + return copy.deepcopy(self) + + + +def new(arg=None): + """Return a new RIPEMD160 object. An optional string argument + may be provided; if present, this string will be automatically + hashed.""" + return RIPEMD160(arg) + + + +# +# Private. 
+# + +class RMDContext: + def __init__(self): + self.state = [0x67452301, 0xEFCDAB89, 0x98BADCFE, + 0x10325476, 0xC3D2E1F0] # uint32 + self.count = 0 # uint64 + self.buffer = [0]*64 # uchar + def copy(self): + ctx = RMDContext() + ctx.state = self.state[:] + ctx.count = self.count + ctx.buffer = self.buffer[:] + return ctx + +K0 = 0x00000000 +K1 = 0x5A827999 +K2 = 0x6ED9EBA1 +K3 = 0x8F1BBCDC +K4 = 0xA953FD4E + +KK0 = 0x50A28BE6 +KK1 = 0x5C4DD124 +KK2 = 0x6D703EF3 +KK3 = 0x7A6D76E9 +KK4 = 0x00000000 + +def ROL(n, x): + return ((x << n) & 0xffffffff) | (x >> (32 - n)) + +def F0(x, y, z): + return x ^ y ^ z + +def F1(x, y, z): + return (x & y) | (((~x) % 0x100000000) & z) + +def F2(x, y, z): + return (x | ((~y) % 0x100000000)) ^ z + +def F3(x, y, z): + return (x & z) | (((~z) % 0x100000000) & y) + +def F4(x, y, z): + return x ^ (y | ((~z) % 0x100000000)) + +def R(a, b, c, d, e, Fj, Kj, sj, rj, X): + a = ROL(sj, (a + Fj(b, c, d) + X[rj] + Kj) % 0x100000000) + e + c = ROL(10, c) + return a % 0x100000000, c + +PADDING = [0x80] + [0]*63 + +import sys +import struct + +def RMD160Transform(state, block): #uint32 state[5], uchar block[64] + x = [0]*16 + if sys.byteorder == 'little': + if is_python2: + x = struct.unpack('<16L', ''.join([chr(x) for x in block[0:64]])) + else: + x = struct.unpack('<16L', bytes(block[0:64])) + else: + raise "Error!!" + a = state[0] + b = state[1] + c = state[2] + d = state[3] + e = state[4] + + #/* Round 1 */ + a, c = R(a, b, c, d, e, F0, K0, 11, 0, x); + e, b = R(e, a, b, c, d, F0, K0, 14, 1, x); + d, a = R(d, e, a, b, c, F0, K0, 15, 2, x); + c, e = R(c, d, e, a, b, F0, K0, 12, 3, x); + b, d = R(b, c, d, e, a, F0, K0, 5, 4, x); + a, c = R(a, b, c, d, e, F0, K0, 8, 5, x); + e, b = R(e, a, b, c, d, F0, K0, 7, 6, x); + d, a = R(d, e, a, b, c, F0, K0, 9, 7, x); + c, e = R(c, d, e, a, b, F0, K0, 11, 8, x); + b, d = R(b, c, d, e, a, F0, K0, 13, 9, x); + a, c = R(a, b, c, d, e, F0, K0, 14, 10, x); + e, b = R(e, a, b, c, d, F0, K0, 15, 11, x); + d, a = R(d, e, a, b, c, F0, K0, 6, 12, x); + c, e = R(c, d, e, a, b, F0, K0, 7, 13, x); + b, d = R(b, c, d, e, a, F0, K0, 9, 14, x); + a, c = R(a, b, c, d, e, F0, K0, 8, 15, x); #/* #15 */ + #/* Round 2 */ + e, b = R(e, a, b, c, d, F1, K1, 7, 7, x); + d, a = R(d, e, a, b, c, F1, K1, 6, 4, x); + c, e = R(c, d, e, a, b, F1, K1, 8, 13, x); + b, d = R(b, c, d, e, a, F1, K1, 13, 1, x); + a, c = R(a, b, c, d, e, F1, K1, 11, 10, x); + e, b = R(e, a, b, c, d, F1, K1, 9, 6, x); + d, a = R(d, e, a, b, c, F1, K1, 7, 15, x); + c, e = R(c, d, e, a, b, F1, K1, 15, 3, x); + b, d = R(b, c, d, e, a, F1, K1, 7, 12, x); + a, c = R(a, b, c, d, e, F1, K1, 12, 0, x); + e, b = R(e, a, b, c, d, F1, K1, 15, 9, x); + d, a = R(d, e, a, b, c, F1, K1, 9, 5, x); + c, e = R(c, d, e, a, b, F1, K1, 11, 2, x); + b, d = R(b, c, d, e, a, F1, K1, 7, 14, x); + a, c = R(a, b, c, d, e, F1, K1, 13, 11, x); + e, b = R(e, a, b, c, d, F1, K1, 12, 8, x); #/* #31 */ + #/* Round 3 */ + d, a = R(d, e, a, b, c, F2, K2, 11, 3, x); + c, e = R(c, d, e, a, b, F2, K2, 13, 10, x); + b, d = R(b, c, d, e, a, F2, K2, 6, 14, x); + a, c = R(a, b, c, d, e, F2, K2, 7, 4, x); + e, b = R(e, a, b, c, d, F2, K2, 14, 9, x); + d, a = R(d, e, a, b, c, F2, K2, 9, 15, x); + c, e = R(c, d, e, a, b, F2, K2, 13, 8, x); + b, d = R(b, c, d, e, a, F2, K2, 15, 1, x); + a, c = R(a, b, c, d, e, F2, K2, 14, 2, x); + e, b = R(e, a, b, c, d, F2, K2, 8, 7, x); + d, a = R(d, e, a, b, c, F2, K2, 13, 0, x); + c, e = R(c, d, e, a, b, F2, K2, 6, 6, x); + b, d = R(b, c, d, e, a, F2, K2, 5, 13, x); + a, c = R(a, b, c, d, e, 
F2, K2, 12, 11, x); + e, b = R(e, a, b, c, d, F2, K2, 7, 5, x); + d, a = R(d, e, a, b, c, F2, K2, 5, 12, x); #/* #47 */ + #/* Round 4 */ + c, e = R(c, d, e, a, b, F3, K3, 11, 1, x); + b, d = R(b, c, d, e, a, F3, K3, 12, 9, x); + a, c = R(a, b, c, d, e, F3, K3, 14, 11, x); + e, b = R(e, a, b, c, d, F3, K3, 15, 10, x); + d, a = R(d, e, a, b, c, F3, K3, 14, 0, x); + c, e = R(c, d, e, a, b, F3, K3, 15, 8, x); + b, d = R(b, c, d, e, a, F3, K3, 9, 12, x); + a, c = R(a, b, c, d, e, F3, K3, 8, 4, x); + e, b = R(e, a, b, c, d, F3, K3, 9, 13, x); + d, a = R(d, e, a, b, c, F3, K3, 14, 3, x); + c, e = R(c, d, e, a, b, F3, K3, 5, 7, x); + b, d = R(b, c, d, e, a, F3, K3, 6, 15, x); + a, c = R(a, b, c, d, e, F3, K3, 8, 14, x); + e, b = R(e, a, b, c, d, F3, K3, 6, 5, x); + d, a = R(d, e, a, b, c, F3, K3, 5, 6, x); + c, e = R(c, d, e, a, b, F3, K3, 12, 2, x); #/* #63 */ + #/* Round 5 */ + b, d = R(b, c, d, e, a, F4, K4, 9, 4, x); + a, c = R(a, b, c, d, e, F4, K4, 15, 0, x); + e, b = R(e, a, b, c, d, F4, K4, 5, 5, x); + d, a = R(d, e, a, b, c, F4, K4, 11, 9, x); + c, e = R(c, d, e, a, b, F4, K4, 6, 7, x); + b, d = R(b, c, d, e, a, F4, K4, 8, 12, x); + a, c = R(a, b, c, d, e, F4, K4, 13, 2, x); + e, b = R(e, a, b, c, d, F4, K4, 12, 10, x); + d, a = R(d, e, a, b, c, F4, K4, 5, 14, x); + c, e = R(c, d, e, a, b, F4, K4, 12, 1, x); + b, d = R(b, c, d, e, a, F4, K4, 13, 3, x); + a, c = R(a, b, c, d, e, F4, K4, 14, 8, x); + e, b = R(e, a, b, c, d, F4, K4, 11, 11, x); + d, a = R(d, e, a, b, c, F4, K4, 8, 6, x); + c, e = R(c, d, e, a, b, F4, K4, 5, 15, x); + b, d = R(b, c, d, e, a, F4, K4, 6, 13, x); #/* #79 */ + + aa = a; + bb = b; + cc = c; + dd = d; + ee = e; + + a = state[0] + b = state[1] + c = state[2] + d = state[3] + e = state[4] + + #/* Parallel round 1 */ + a, c = R(a, b, c, d, e, F4, KK0, 8, 5, x) + e, b = R(e, a, b, c, d, F4, KK0, 9, 14, x) + d, a = R(d, e, a, b, c, F4, KK0, 9, 7, x) + c, e = R(c, d, e, a, b, F4, KK0, 11, 0, x) + b, d = R(b, c, d, e, a, F4, KK0, 13, 9, x) + a, c = R(a, b, c, d, e, F4, KK0, 15, 2, x) + e, b = R(e, a, b, c, d, F4, KK0, 15, 11, x) + d, a = R(d, e, a, b, c, F4, KK0, 5, 4, x) + c, e = R(c, d, e, a, b, F4, KK0, 7, 13, x) + b, d = R(b, c, d, e, a, F4, KK0, 7, 6, x) + a, c = R(a, b, c, d, e, F4, KK0, 8, 15, x) + e, b = R(e, a, b, c, d, F4, KK0, 11, 8, x) + d, a = R(d, e, a, b, c, F4, KK0, 14, 1, x) + c, e = R(c, d, e, a, b, F4, KK0, 14, 10, x) + b, d = R(b, c, d, e, a, F4, KK0, 12, 3, x) + a, c = R(a, b, c, d, e, F4, KK0, 6, 12, x) #/* #15 */ + #/* Parallel round 2 */ + e, b = R(e, a, b, c, d, F3, KK1, 9, 6, x) + d, a = R(d, e, a, b, c, F3, KK1, 13, 11, x) + c, e = R(c, d, e, a, b, F3, KK1, 15, 3, x) + b, d = R(b, c, d, e, a, F3, KK1, 7, 7, x) + a, c = R(a, b, c, d, e, F3, KK1, 12, 0, x) + e, b = R(e, a, b, c, d, F3, KK1, 8, 13, x) + d, a = R(d, e, a, b, c, F3, KK1, 9, 5, x) + c, e = R(c, d, e, a, b, F3, KK1, 11, 10, x) + b, d = R(b, c, d, e, a, F3, KK1, 7, 14, x) + a, c = R(a, b, c, d, e, F3, KK1, 7, 15, x) + e, b = R(e, a, b, c, d, F3, KK1, 12, 8, x) + d, a = R(d, e, a, b, c, F3, KK1, 7, 12, x) + c, e = R(c, d, e, a, b, F3, KK1, 6, 4, x) + b, d = R(b, c, d, e, a, F3, KK1, 15, 9, x) + a, c = R(a, b, c, d, e, F3, KK1, 13, 1, x) + e, b = R(e, a, b, c, d, F3, KK1, 11, 2, x) #/* #31 */ + #/* Parallel round 3 */ + d, a = R(d, e, a, b, c, F2, KK2, 9, 15, x) + c, e = R(c, d, e, a, b, F2, KK2, 7, 5, x) + b, d = R(b, c, d, e, a, F2, KK2, 15, 1, x) + a, c = R(a, b, c, d, e, F2, KK2, 11, 3, x) + e, b = R(e, a, b, c, d, F2, KK2, 8, 7, x) + d, a = R(d, e, a, b, c, F2, KK2, 6, 14, x) + c, e 
= R(c, d, e, a, b, F2, KK2, 6, 6, x) + b, d = R(b, c, d, e, a, F2, KK2, 14, 9, x) + a, c = R(a, b, c, d, e, F2, KK2, 12, 11, x) + e, b = R(e, a, b, c, d, F2, KK2, 13, 8, x) + d, a = R(d, e, a, b, c, F2, KK2, 5, 12, x) + c, e = R(c, d, e, a, b, F2, KK2, 14, 2, x) + b, d = R(b, c, d, e, a, F2, KK2, 13, 10, x) + a, c = R(a, b, c, d, e, F2, KK2, 13, 0, x) + e, b = R(e, a, b, c, d, F2, KK2, 7, 4, x) + d, a = R(d, e, a, b, c, F2, KK2, 5, 13, x) #/* #47 */ + #/* Parallel round 4 */ + c, e = R(c, d, e, a, b, F1, KK3, 15, 8, x) + b, d = R(b, c, d, e, a, F1, KK3, 5, 6, x) + a, c = R(a, b, c, d, e, F1, KK3, 8, 4, x) + e, b = R(e, a, b, c, d, F1, KK3, 11, 1, x) + d, a = R(d, e, a, b, c, F1, KK3, 14, 3, x) + c, e = R(c, d, e, a, b, F1, KK3, 14, 11, x) + b, d = R(b, c, d, e, a, F1, KK3, 6, 15, x) + a, c = R(a, b, c, d, e, F1, KK3, 14, 0, x) + e, b = R(e, a, b, c, d, F1, KK3, 6, 5, x) + d, a = R(d, e, a, b, c, F1, KK3, 9, 12, x) + c, e = R(c, d, e, a, b, F1, KK3, 12, 2, x) + b, d = R(b, c, d, e, a, F1, KK3, 9, 13, x) + a, c = R(a, b, c, d, e, F1, KK3, 12, 9, x) + e, b = R(e, a, b, c, d, F1, KK3, 5, 7, x) + d, a = R(d, e, a, b, c, F1, KK3, 15, 10, x) + c, e = R(c, d, e, a, b, F1, KK3, 8, 14, x) #/* #63 */ + #/* Parallel round 5 */ + b, d = R(b, c, d, e, a, F0, KK4, 8, 12, x) + a, c = R(a, b, c, d, e, F0, KK4, 5, 15, x) + e, b = R(e, a, b, c, d, F0, KK4, 12, 10, x) + d, a = R(d, e, a, b, c, F0, KK4, 9, 4, x) + c, e = R(c, d, e, a, b, F0, KK4, 12, 1, x) + b, d = R(b, c, d, e, a, F0, KK4, 5, 5, x) + a, c = R(a, b, c, d, e, F0, KK4, 14, 8, x) + e, b = R(e, a, b, c, d, F0, KK4, 6, 7, x) + d, a = R(d, e, a, b, c, F0, KK4, 8, 6, x) + c, e = R(c, d, e, a, b, F0, KK4, 13, 2, x) + b, d = R(b, c, d, e, a, F0, KK4, 6, 13, x) + a, c = R(a, b, c, d, e, F0, KK4, 5, 14, x) + e, b = R(e, a, b, c, d, F0, KK4, 15, 0, x) + d, a = R(d, e, a, b, c, F0, KK4, 13, 3, x) + c, e = R(c, d, e, a, b, F0, KK4, 11, 9, x) + b, d = R(b, c, d, e, a, F0, KK4, 11, 11, x) #/* #79 */ + + t = (state[1] + cc + d) % 0x100000000; + state[1] = (state[2] + dd + e) % 0x100000000; + state[2] = (state[3] + ee + a) % 0x100000000; + state[3] = (state[4] + aa + b) % 0x100000000; + state[4] = (state[0] + bb + c) % 0x100000000; + state[0] = t % 0x100000000; + + pass + + +def RMD160Update(ctx, inp, inplen): + if type(inp) == str: + inp = [ord(i)&0xff for i in inp] + + have = int((ctx.count // 8) % 64) + inplen = int(inplen) + need = 64 - have + ctx.count += 8 * inplen + off = 0 + if inplen >= need: + if have: + for i in range(need): + ctx.buffer[have+i] = inp[i] + RMD160Transform(ctx.state, ctx.buffer) + off = need + have = 0 + while off + 64 <= inplen: + RMD160Transform(ctx.state, inp[off:]) #<--- + off += 64 + if off < inplen: + # memcpy(ctx->buffer + have, input+off, len-off); + for i in range(inplen - off): + ctx.buffer[have+i] = inp[off+i] + +def RMD160Final(ctx): + size = struct.pack(" 73: return False + if (sig[0] != 0x30): return False + if (sig[1] != len(sig)-3): return False + rlen = sig[3] + if (5+rlen >= len(sig)): return False + slen = sig[5+rlen] + if (rlen + slen + 7 != len(sig)): return False + if (sig[2] != 0x02): return False + if (rlen == 0): return False + if (sig[4] & 0x80): return False + if (rlen > 1 and (sig[4] == 0x00) and not (sig[5] & 0x80)): return False + if (sig[4+rlen] != 0x02): return False + if (slen == 0): return False + if (sig[rlen+6] & 0x80): return False + if (slen > 1 and (sig[6+rlen] == 0x00) and not (sig[7+rlen] & 0x80)): + return False + return True + +def txhash(tx, hashcode=None): + if isinstance(tx, str) and 
re.match('^[0-9a-fA-F]*$', tx): + tx = changebase(tx, 16, 256) + if hashcode: + return dbl_sha256(from_string_to_bytes(tx) + encode(int(hashcode), 256, 4)[::-1]) + else: + return safe_hexlify(bin_dbl_sha256(tx)[::-1]) + + +def bin_txhash(tx, hashcode=None): + return binascii.unhexlify(txhash(tx, hashcode)) + + +def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL): + rawsig = ecdsa_raw_sign(bin_txhash(tx, hashcode), priv) + return der_encode_sig(*rawsig)+encode(hashcode, 16, 2) + + +def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL): + return ecdsa_raw_verify(bin_txhash(tx, hashcode), der_decode_sig(sig), pub) + + +def ecdsa_tx_recover(tx, sig, hashcode=SIGHASH_ALL): + z = bin_txhash(tx, hashcode) + _, r, s = der_decode_sig(sig) + left = ecdsa_raw_recover(z, (0, r, s)) + right = ecdsa_raw_recover(z, (1, r, s)) + return (encode_pubkey(left, 'hex'), encode_pubkey(right, 'hex')) + +# Scripts + + +def mk_pubkey_script(addr): + # Keep the auxiliary functions around for altcoins' sake + return '76a914' + b58check_to_hex(addr) + '88ac' + + +def mk_scripthash_script(addr): + return 'a914' + b58check_to_hex(addr) + '87' + +# Address representation to output script + + +def address_to_script(addr): + if addr[0] == '3' or addr[0] == '2': + return mk_scripthash_script(addr) + else: + return mk_pubkey_script(addr) + +# Output script to address representation + + +def script_to_address(script, vbyte=0): + if re.match('^[0-9a-fA-F]*$', script): + script = binascii.unhexlify(script) + if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(script) == 25: + return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses + else: + if vbyte in [111, 196]: + # Testnet + scripthash_byte = 196 + elif vbyte == 0: + # Mainnet + scripthash_byte = 5 + else: + scripthash_byte = vbyte + # BIP0016 scripthash addresses + return bin_to_b58check(script[2:-1], scripthash_byte) + + +def p2sh_scriptaddr(script, magicbyte=5): + if re.match('^[0-9a-fA-F]*$', script): + script = binascii.unhexlify(script) + return hex_to_b58check(hash160(script), magicbyte) +scriptaddr = p2sh_scriptaddr + + +def deserialize_script(script): + if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script): + return json_changebase(deserialize_script(binascii.unhexlify(script)), + lambda x: safe_hexlify(x)) + out, pos = [], 0 + while pos < len(script): + code = from_byte_to_int(script[pos]) + if code == 0: + out.append(None) + pos += 1 + elif code <= 75: + out.append(script[pos+1:pos+1+code]) + pos += 1 + code + elif code <= 78: + szsz = pow(2, code - 76) + sz = decode(script[pos+szsz: pos:-1], 256) + out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz]) + pos += 1 + szsz + sz + elif code <= 96: + out.append(code - 80) + pos += 1 + else: + out.append(code) + pos += 1 + return out + + +def serialize_script_unit(unit): + if isinstance(unit, int): + if unit < 16: + return from_int_to_byte(unit + 80) + else: + return from_int_to_byte(unit) + elif unit is None: + return b'\x00' + else: + if len(unit) <= 75: + return from_int_to_byte(len(unit))+unit + elif len(unit) < 256: + return from_int_to_byte(76)+from_int_to_byte(len(unit))+unit + elif len(unit) < 65536: + return from_int_to_byte(77)+encode(len(unit), 256, 2)[::-1]+unit + else: + return from_int_to_byte(78)+encode(len(unit), 256, 4)[::-1]+unit + + +if is_python2: + def serialize_script(script): + if json_is_base(script, 16): + return binascii.hexlify(serialize_script(json_changebase(script, + lambda x: binascii.unhexlify(x)))) + return ''.join(map(serialize_script_unit, 
script))
+else:
+    def serialize_script(script):
+        if json_is_base(script, 16):
+            return safe_hexlify(serialize_script(json_changebase(script,
+                    lambda x: binascii.unhexlify(x))))
+
+        result = bytes()
+        for b in map(serialize_script_unit, script):
+            result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
+        return result
+
+
+def mk_multisig_script(*args):  # [pubs],k or pub1,pub2...pub[n],k
+    if isinstance(args[0], list):
+        pubs, k = args[0], int(args[1])
+    else:
+        pubs = list(filter(lambda x: len(str(x)) >= 32, args))
+        k = int(args[len(pubs)])
+    return serialize_script([k]+pubs+[len(pubs)]+[0xae])
+
+# Signing and verifying
+
+
+def verify_tx_input(tx, i, script, sig, pub):
+    if re.match('^[0-9a-fA-F]*$', tx):
+        tx = binascii.unhexlify(tx)
+    if re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    if not re.match('^[0-9a-fA-F]*$', sig):
+        sig = safe_hexlify(sig)
+    hashcode = decode(sig[-2:], 16)
+    modtx = signature_form(tx, int(i), script, hashcode)
+    return ecdsa_tx_verify(modtx, sig, pub, hashcode)
+
+
+def sign(tx, i, priv, hashcode=SIGHASH_ALL):
+    i = int(i)
+    # accept raw bytes input by signing the hex form and converting back
+    if (not is_python2 and isinstance(tx, bytes)) or not re.match('^[0-9a-fA-F]*$', tx):
+        return binascii.unhexlify(sign(safe_hexlify(tx), i, priv))
+    if len(priv) <= 33:
+        priv = safe_hexlify(priv)
+    pub = privkey_to_pubkey(priv)
+    address = pubkey_to_address(pub)
+    signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
+    sig = ecdsa_tx_sign(signing_tx, priv, hashcode)
+    txobj = deserialize(tx)
+    txobj["ins"][i]["script"] = serialize_script([sig, pub])
+    return serialize(txobj)
+
+
+def signall(tx, priv):
+    # if priv is a dictionary, assume format is
+    # { 'txinhash:txinidx' : privkey }
+    if isinstance(priv, dict):
+        for e, i in enumerate(deserialize(tx)["ins"]):
+            k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
+            tx = sign(tx, e, k)
+    else:
+        for i in range(len(deserialize(tx)["ins"])):
+            tx = sign(tx, i, priv)
+    return tx
+
+
+def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
+    if re.match('^[0-9a-fA-F]*$', tx):
+        tx = binascii.unhexlify(tx)
+    if re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    modtx = signature_form(tx, i, script, hashcode)
+    return ecdsa_tx_sign(modtx, pk, hashcode)
+
+
+def apply_multisignatures(*args):
+    # tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
+    tx, i, script = args[0], int(args[1]), args[2]
+    sigs = args[3] if isinstance(args[3], list) else list(args[3:])
+
+    if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
+        script = binascii.unhexlify(script)
+    sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
+    if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
+        return safe_hexlify(apply_multisignatures(binascii.unhexlify(tx), i, script, sigs))
+
+    # Not pushing empty elements on the top of the stack if passing no
+    # script (in case of bare multisig inputs there is no script)
+    script_blob = [] if len(script) == 0 else [script]
+
+    txobj = deserialize(tx)
+    txobj["ins"][i]["script"] = serialize_script([None]+sigs+script_blob)
+    return serialize(txobj)
+
+
+def is_inp(arg):
+    return len(arg) > 64 or "output" in arg or "outpoint" in arg
+
+
+def mktx(*args):
+    # [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
+ ins, outs = [], [] + for arg in args: + if isinstance(arg, list): + for a in arg: (ins if is_inp(a) else outs).append(a) + else: + (ins if is_inp(arg) else outs).append(arg) + + txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []} + for i in ins: + if isinstance(i, dict) and "outpoint" in i: + txobj["ins"].append(i) + else: + if isinstance(i, dict) and "output" in i: + i = i["output"] + txobj["ins"].append({ + "outpoint": {"hash": i[:64], "index": int(i[65:])}, + "script": "", + "sequence": 4294967295 + }) + for o in outs: + if isinstance(o, string_or_bytes_types): + addr = o[:o.find(':')] + val = int(o[o.find(':')+1:]) + o = {} + if re.match('^[0-9a-fA-F]*$', addr): + o["script"] = addr + else: + o["address"] = addr + o["value"] = val + + outobj = {} + if "address" in o: + outobj["script"] = address_to_script(o["address"]) + elif "script" in o: + outobj["script"] = o["script"] + else: + raise Exception("Could not find 'address' or 'script' in output.") + outobj["value"] = o["value"] + txobj["outs"].append(outobj) + + return serialize(txobj) + + +def select(unspent, value): + value = int(value) + high = [u for u in unspent if u["value"] >= value] + high.sort(key=lambda u: u["value"]) + low = [u for u in unspent if u["value"] < value] + low.sort(key=lambda u: -u["value"]) + if len(high): + return [high[0]] + i, tv = 0, 0 + while tv < value and i < len(low): + tv += low[i]["value"] + i += 1 + if tv < value: + raise Exception("Not enough funds") + return low[:i] + +# Only takes inputs of the form { "output": blah, "value": foo } + + +def mksend(*args): + argz, change, fee = args[:-2], args[-2], int(args[-1]) + ins, outs = [], [] + for arg in argz: + if isinstance(arg, list): + for a in arg: + (ins if is_inp(a) else outs).append(a) + else: + (ins if is_inp(arg) else outs).append(arg) + + isum = sum([i["value"] for i in ins]) + osum, outputs2 = 0, [] + for o in outs: + if isinstance(o, string_types): + o2 = { + "address": o[:o.find(':')], + "value": int(o[o.find(':')+1:]) + } + else: + o2 = o + outputs2.append(o2) + osum += o2["value"] + + if isum < osum+fee: + raise Exception("Not enough money") + elif isum > osum+fee+5430: + outputs2 += [{"address": change, "value": isum-osum-fee}] + + return mktx(ins, outputs2) diff --git a/src/lib/pybitcointools/pybtctool b/src/lib/pybitcointools/pybtctool new file mode 100644 index 00000000..2f6b3dcc --- /dev/null +++ b/src/lib/pybitcointools/pybtctool @@ -0,0 +1,36 @@ +#!/usr/bin/python +import sys, json, re +from bitcoin import * + +if len(sys.argv) == 1: + print "pybtctool ..." 
+else: + cmdargs, preargs, kwargs = [], [], {} + i = 2 + # Process first arg tag + if sys.argv[1] == '-s': + preargs.extend(re.findall(r'\S\S*', sys.stdin.read())) + elif sys.argv[1] == '-B': + preargs.extend([sys.stdin.read()]) + elif sys.argv[1] == '-b': + preargs.extend([sys.stdin.read()[:-1]]) + elif sys.argv[1] == '-j': + preargs.extend([json.loads(sys.stdin.read())]) + elif sys.argv[1] == '-J': + preargs.extend(json.loads(sys.stdin.read())) + else: + i = 1 + while i < len(sys.argv): + if sys.argv[i][:2] == '--': + kwargs[sys.argv[i][2:]] = sys.argv[i+1] + i += 2 + else: + cmdargs.append(sys.argv[i]) + i += 1 + cmd = cmdargs[0] + args = preargs + cmdargs[1:] + o = vars()[cmd](*args, **kwargs) + if isinstance(o, (list, dict)): + print json.dumps(o) + else: + print o diff --git a/src/lib/pybitcointools/setup.py b/src/lib/pybitcointools/setup.py new file mode 100644 index 00000000..e01a9bfc --- /dev/null +++ b/src/lib/pybitcointools/setup.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python +try: + from setuptools import setup +except ImportError: + from distutils.core import setup + +setup(name='bitcoin', + version='1.1.42', + description='Python Bitcoin Tools', + author='Vitalik Buterin', + author_email='vbuterin@gmail.com', + url='http://github.com/vbuterin/pybitcointools', + packages=['bitcoin'], + scripts=['pybtctool'], + include_package_data=True, + data_files=[("", ["LICENSE"]), ("bitcoin", ["bitcoin/english.txt"])], + ) diff --git a/src/lib/pybitcointools/test.py b/src/lib/pybitcointools/test.py new file mode 100644 index 00000000..59edaace --- /dev/null +++ b/src/lib/pybitcointools/test.py @@ -0,0 +1,496 @@ +import json +import os +import random +import unittest + +import bitcoin.ripemd as ripemd +from bitcoin import * + + +class TestECCArithmetic(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Starting ECC arithmetic tests') + + def test_all(self): + for i in range(8): + print('### Round %d' % (i+1)) + x, y = random.randrange(2**256), random.randrange(2**256) + self.assertEqual( + multiply(multiply(G, x), y)[0], + multiply(multiply(G, y), x)[0] + ) + self.assertEqual( + + add_pubkeys(multiply(G, x), multiply(G, y))[0], + multiply(G, add_privkeys(x, y))[0] + ) + + hx, hy = encode(x % N, 16, 64), encode(y % N, 16, 64) + self.assertEqual( + multiply(multiply(G, hx), hy)[0], + multiply(multiply(G, hy), hx)[0] + ) + self.assertEqual( + add_pubkeys(multiply(G, hx), multiply(G, hy))[0], + multiply(G, add_privkeys(hx, hy))[0] + ) + self.assertEqual( + b58check_to_hex(pubtoaddr(privtopub(x))), + b58check_to_hex(pubtoaddr(multiply(G, hx), 23)) + ) + + p = privtopub(sha256(str(x))) + if i % 2 == 1: + p = changebase(p, 16, 256) + self.assertEqual(p, decompress(compress(p))) + self.assertEqual(G[0], multiply(divide(G, x), x)[0]) + + +class TestBases(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Starting base change tests') + + def test_all(self): + data = [ + [10, '65535', 16, 'ffff'], + [16, 'deadbeef', 10, '3735928559'], + [10, '0', 16, ''], + [256, b'34567', 10, '219919234615'], + [10, '444', 16, '1bc'], + [256, b'\x03\x04\x05\x06\x07', 10, '12952339975'], + [16, '3132333435', 256, b'12345'] + ] + for prebase, preval, postbase, postval in data: + self.assertEqual(changebase(preval, prebase, postbase), postval) + + for i in range(100): + x = random.randrange(1, 9999999999999999) + frm = random.choice([2, 10, 16, 58, 256]) + to = random.choice([2, 10, 16, 58, 256]) + self.assertEqual(decode(encode(x, to), to), x) + self.assertEqual(changebase(encode(x, 
frm), frm, to), encode(x, to)) + self.assertEqual(decode(changebase(encode(x, frm), frm, to), to), x) + + +class TestElectrumWalletInternalConsistency(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Starting Electrum wallet internal consistency tests') + + def test_all(self): + for i in range(3): + seed = sha256(str(random.randrange(2**40)))[:32] + mpk = electrum_mpk(seed) + for i in range(5): + pk = electrum_privkey(seed, i) + pub = electrum_pubkey((mpk, seed)[i % 2], i) + pub2 = privtopub(pk) + self.assertEqual( + pub, + pub2, + 'Does not match! Details:\nseed: %s\nmpk: %s\npriv: %s\npub: %s\npub2: %s' % ( + seed, mpk, pk, pub, pub2 + ) + ) + + +class TestRawSignRecover(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print("Basic signing and recovery tests") + + def test_all(self): + for i in range(20): + k = sha256(str(i)) + s = ecdsa_raw_sign('35' * 32, k) + self.assertEqual( + ecdsa_raw_recover('35' * 32, s), + decode_pubkey(privtopub(k)) + ) + + +class TestTransactionSignVerify(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print("Transaction-style signing and verification tests") + + def test_all(self): + alphabet = "1234567890qwertyuiopasdfghjklzxcvbnm" + for i in range(10): + msg = ''.join([random.choice(alphabet) for i in range(random.randrange(20, 200))]) + priv = sha256(str(random.randrange(2**256))) + pub = privtopub(priv) + sig = ecdsa_tx_sign(msg, priv) + self.assertTrue( + ecdsa_tx_verify(msg, sig, pub), + "Verification error" + ) + + self.assertIn( + pub, + ecdsa_tx_recover(msg, sig), + "Recovery failed" + ) + + +class TestSerialize(unittest.TestCase): + + def test_serialize(self): + tx = '0100000001239f932c780e517015842f3b02ff765fba97f9f63f9f1bc718b686a56ed9c73400000000fd5d010047304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053aeffffffff0310270000000000001976a914a89733100315c37d228a529853af341a9d290a4588ac409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd87409c00000000000017a9142b56f9a4009d9ff99b8f97bea4455cd71135f5dd8700000000' + self.assertEqual( + serialize(deserialize(tx)), + tx, + "Serialize roundtrip failed" + ) + + def test_serialize_script(self): + script = '47304402200c40fa58d3f6d5537a343cf9c8d13bc7470baf1d13867e0de3e535cd6b4354c802200f2b48f67494835b060d0b2ff85657d2ba2d9ea4e697888c8cb580e8658183a801483045022056f488c59849a4259e7cef70fe5d6d53a4bd1c59a195b0577bd81cb76044beca022100a735b319fa66af7b178fc719b93f905961ef4d4446deca8757a90de2106dd98a014cc95241046c7d87fd72caeab48e937f2feca9e9a4bd77f0eff4ebb2dbbb9855c023e334e188d32aaec4632ea4cbc575c037d8101aec73d029236e7b1c2380f3e4ad7edced41046fd41cddf3bbda33a240b417a825cc46555949917c7ccf64c59f42fd8dfe95f34fae3b09ed279c8c5b3530510e8cca6230791102eef9961d895e8db54af0563c410488d618b988efd2511fc1f9c03f11c210808852b07fe46128c1a6b1155aa22cdf4b6802460ba593db2d11c7e6cbe19cedef76b7bcabd05d26fd97f4c5a59b225053ae' + self.assertEqual( + 
serialize_script(deserialize_script(script)), + script, + "Script serialize roundtrip failed" + ) + + +class TestTransaction(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("Attempting transaction creation") + + # FIXME: I don't know how to write this as a unit test. + # What should be asserted? + def test_all(self): + privs = [sha256(str(random.randrange(2**256))) for x in range(4)] + pubs = [privtopub(priv) for priv in privs] + addresses = [pubtoaddr(pub) for pub in pubs] + mscript = mk_multisig_script(pubs[1:], 2, 3) + msigaddr = p2sh_scriptaddr(mscript) + tx = mktx(['01'*32+':1', '23'*32+':2'], [msigaddr+':20202', addresses[0]+':40404']) + tx1 = sign(tx, 1, privs[0]) + + sig1 = multisign(tx, 0, mscript, privs[1]) + self.assertTrue(verify_tx_input(tx1, 0, mscript, sig1, pubs[1]), "Verification Error") + + sig3 = multisign(tx, 0, mscript, privs[3]) + self.assertTrue(verify_tx_input(tx1, 0, mscript, sig3, pubs[3]), "Verification Error") + + tx2 = apply_multisignatures(tx1, 0, mscript, [sig1, sig3]) + print("Outputting transaction: ", tx2) + + # https://github.com/vbuterin/pybitcointools/issues/71 + def test_multisig(self): + script = mk_multisig_script(["0254236f7d1124fc07600ad3eec5ac47393bf963fbf0608bcce255e685580d16d9", + "03560cad89031c412ad8619398bd43b3d673cb5bdcdac1afc46449382c6a8e0b2b"], + 2) + + self.assertEqual(p2sh_scriptaddr(script), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3") + + self.assertEqual(p2sh_scriptaddr(script, 0x05), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3") + self.assertEqual(p2sh_scriptaddr(script, 5), "33byJBaS5N45RHFcatTSt9ZjiGb6nK4iV3") + + self.assertEqual(p2sh_scriptaddr(script, 0xc4), "2MuABMvWTgpZRd4tAG25KW6YzvcoGVZDZYP") + self.assertEqual(p2sh_scriptaddr(script, 196), "2MuABMvWTgpZRd4tAG25KW6YzvcoGVZDZYP") + + +class TestDeterministicGenerate(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("Beginning RFC6979 deterministic signing tests") + + def test_all(self): + # Created with python-ecdsa 0.9 + # Code to make your own vectors: + # class gen: + # def order(self): return 115792089237316195423570985008687907852837564279074904382605163141518161494337 + # dummy = gen() + # for i in range(10): ecdsa.rfc6979.generate_k(dummy, i, hashlib.sha256, hashlib.sha256(str(i)).digest()) + test_vectors = [ + 32783320859482229023646250050688645858316445811207841524283044428614360139869, + 109592113955144883013243055602231029997040992035200230706187150761552110229971, + 65765393578006003630736298397268097590176526363988568884298609868706232621488, + 85563144787585457107933685459469453513056530050186673491900346620874099325918, + 99829559501561741463404068005537785834525504175465914981205926165214632019533, + 7755945018790142325513649272940177083855222863968691658328003977498047013576, + 81516639518483202269820502976089105897400159721845694286620077204726637043798, + 52824159213002398817852821148973968315579759063230697131029801896913602807019, + 44033460667645047622273556650595158811264350043302911918907282441675680538675, + 32396602643737403620316035551493791485834117358805817054817536312402837398361 + ] + + for i, ti in enumerate(test_vectors): + mine = deterministic_generate_k(bin_sha256(str(i)), encode(i, 256, 32)) + self.assertEqual( + ti, + mine, + "Test vector does not match. 
Details:\n%s\n%s" % ( + ti, + mine + ) + ) + + +class TestBIP0032(unittest.TestCase): + """See: https://en.bitcoin.it/wiki/BIP_0032""" + @classmethod + def setUpClass(cls): + print("Beginning BIP0032 tests") + + def _full_derive(self, key, chain): + if len(chain) == 0: + return key + elif chain[0] == 'pub': + return self._full_derive(bip32_privtopub(key), chain[1:]) + else: + return self._full_derive(bip32_ckd(key, chain[0]), chain[1:]) + + def test_all(self): + test_vectors = [ + [[], 'xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi'], + [['pub'], 'xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8'], + [[2**31], 'xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7'], + [[2**31, 1], 'xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs'], + [[2**31, 1, 2**31 + 2], 'xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM'], + [[2**31, 1, 2**31 + 2, 'pub', 2, 1000000000], 'xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy'] + ] + + mk = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f')) + + for tv in test_vectors: + left, right = self._full_derive(mk, tv[0]), tv[1] + self.assertEqual( + left, + right, + "Test vector does not match. Details: \n%s\n%s\n\%s" % ( + tv[0], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)], + ) + ) + + def test_all_testnet(self): + test_vectors = [ + [[], 'tprv8ZgxMBicQKsPeDgjzdC36fs6bMjGApWDNLR9erAXMs5skhMv36j9MV5ecvfavji5khqjWaWSFhN3YcCUUdiKH6isR4Pwy3U5y5egddBr16m'], + [['pub'], 'tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp'], + [[2**31], 'tprv8bxNLu25VazNnppTCP4fyhyCvBHcYtzE3wr3cwYeL4HA7yf6TLGEUdS4QC1vLT63TkjRssqJe4CvGNEC8DzW5AoPUw56D1Ayg6HY4oy8QZ9'], + [[2**31, 1], 'tprv8e8VYgZxtHsSdGrtvdxYaSrryZGiYviWzGWtDDKTGh5NMXAEB8gYSCLHpFCywNs5uqV7ghRjimALQJkRFZnUrLHpzi2pGkwqLtbubgWuQ8q'], + [[2**31, 1, 2**31 + 2], 'tprv8gjmbDPpbAirVSezBEMuwSu1Ci9EpUJWKokZTYccSZSomNMLytWyLdtDNHRbucNaRJWWHANf9AzEdWVAqahfyRjVMKbNRhBmxAM8EJr7R15'], + [[2**31, 1, 2**31 + 2, 'pub', 2, 1000000000], 'tpubDHNy3kAG39ThyiwwsgoKY4iRenXDRtce8qdCFJZXPMCJg5dsCUHayp84raLTpvyiNA9sXPob5rgqkKvkN8S7MMyXbnEhGJMW64Cf4vFAoaF'] + ] + + mk = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f'), TESTNET_PRIVATE) + + for tv in test_vectors: + left, right = self._full_derive(mk, tv[0]), tv[1] + self.assertEqual( + left, + right, + "Test vector does not match. 
Details:\n%s\n%s\n%s\n\%s" % ( + left, + tv[0], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)], + [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)], + ) + ) + + def test_extra(self): + master = bip32_master_key(safe_from_hex("000102030405060708090a0b0c0d0e0f")) + + # m/0 + assert bip32_ckd(master, "0") == "xprv9uHRZZhbkedL37eZEnyrNsQPFZYRAvjy5rt6M1nbEkLSo378x1CQQLo2xxBvREwiK6kqf7GRNvsNEchwibzXaV6i5GcsgyjBeRguXhKsi4R" + assert bip32_privtopub(bip32_ckd(master, "0")) == "xpub68Gmy5EVb2BdFbj2LpWrk1M7obNuaPTpT5oh9QCCo5sRfqSHVYWex97WpDZzszdzHzxXDAzPLVSwybe4uPYkSk4G3gnrPqqkV9RyNzAcNJ1" + + # m/1 + assert bip32_ckd(master, "1") == "xprv9uHRZZhbkedL4yTpidDvuVfrdUkTbhDHviERRBkbzbNDZeMjWzqzKAdxWhzftGDSxDmBdakjqHiZJbkwiaTEXJdjZAaAjMZEE3PMbMrPJih" + assert bip32_privtopub(bip32_ckd(master, "1")) == "xpub68Gmy5EVb2BdHTYHpekwGdcbBWax19w9HwA2DaADYvuCSSgt4YAErxxSN1KWSnmyqkwRNbnTj3XiUBKmHeC8rTjLRPjSULcDKQQgfgJDppq" + + # m/0/0 + assert bip32_ckd(bip32_ckd(master, "0"), "0") == "xprv9ww7sMFLzJMzur2oEQDB642fbsMS4q6JRraMVTrM9bTWBq7NDS8ZpmsKVB4YF3mZecqax1fjnsPF19xnsJNfRp4RSyexacULXMKowSACTRc" + assert bip32_privtopub(bip32_ckd(bip32_ckd(master, "0"), "0")) == "xpub6AvUGrnEpfvJ8L7GLRkBTByQ9uBvUHp9o5VxHrFxhvzV4dSWkySpNaBoLR9FpbnwRmTa69yLHF3QfcaxbWT7gWdwws5k4dpmJvqpEuMWwnj" + + # m/0' + assert bip32_ckd(master, 2**31) == "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7" + assert bip32_privtopub(bip32_ckd(master, 2**31)) == "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw" + + # m/1' + assert bip32_ckd(master, 2**31 + 1) == "xprv9uHRZZhk6KAJFszJGW6LoUFq92uL7FvkBhmYiMurCWPHLJZkX2aGvNdRUBNnJu7nv36WnwCN59uNy6sxLDZvvNSgFz3TCCcKo7iutQzpg78" + assert bip32_privtopub(bip32_ckd(master, 2**31 + 1)) == "xpub68Gmy5EdvgibUN4mNXdMAcCZh4jpWiebYvh9WkKTkqvGD6tu4ZtXUAwuKSyF5DFZVmotf9UHFTGqSXo9qyDBSn47RkaN6Aedt9JbL7zcgSL" + + # m/1' + assert bip32_ckd(master, 1 + 2**31) == "xprv9uHRZZhk6KAJFszJGW6LoUFq92uL7FvkBhmYiMurCWPHLJZkX2aGvNdRUBNnJu7nv36WnwCN59uNy6sxLDZvvNSgFz3TCCcKo7iutQzpg78" + assert bip32_privtopub(bip32_ckd(master, 1 + 2**31)) == "xpub68Gmy5EdvgibUN4mNXdMAcCZh4jpWiebYvh9WkKTkqvGD6tu4ZtXUAwuKSyF5DFZVmotf9UHFTGqSXo9qyDBSn47RkaN6Aedt9JbL7zcgSL" + + # m/0'/0 + assert bip32_ckd(bip32_ckd(master, 2**31), "0") == "xprv9wTYmMFdV23N21MM6dLNavSQV7Sj7meSPXx6AV5eTdqqGLjycVjb115Ec5LgRAXscPZgy5G4jQ9csyyZLN3PZLxoM1h3BoPuEJzsgeypdKj" + assert bip32_privtopub(bip32_ckd(bip32_ckd(master, 2**31), "0")) == "xpub6ASuArnXKPbfEVRpCesNx4P939HDXENHkksgxsVG1yNp9958A33qYoPiTN9QrJmWFa2jNLdK84bWmyqTSPGtApP8P7nHUYwxHPhqmzUyeFG" + + # m/0'/0' + assert bip32_ckd(bip32_ckd(master, 2**31), 2**31) == "xprv9wTYmMFmpgaLB5Hge4YtaGqCKpsYPTD9vXWSsmdZrNU3Y2i4WoBykm6ZteeCLCCZpGxdHQuqEhM6Gdo2X6CVrQiTw6AAneF9WSkA9ewaxtS" + assert bip32_privtopub(bip32_ckd(bip32_ckd(master, 2**31), 2**31)) == "xpub6ASuArnff48dPZN9k65twQmvsri2nuw1HkS3gA3BQi12Qq3D4LWEJZR3jwCAr1NhsFMcQcBkmevmub6SLP37bNq91SEShXtEGUbX3GhNaGk" + + # m/44'/0'/0'/0/0 + assert bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(master, 44 + 2**31), 2**31), 2**31), 0), 0) == "xprvA4A9CuBXhdBtCaLxwrw64Jaran4n1rgzeS5mjH47Ds8V67uZS8tTkG8jV3BZi83QqYXPcN4v8EjK2Aof4YcEeqLt688mV57gF4j6QZWdP9U" + assert bip32_privtopub(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(bip32_ckd(master, 44 + 2**31), 2**31), 2**31), 0), 0)) == "xpub6H9VcQiRXzkBR4RS3tU6RSXb8ouGRKQr1f1NXfTinCfTxvEhygCiJ4TDLHz1dyQ6d2Vz8Ne7eezkrViwaPo2ZMsNjVtFwvzsQXCDV6HJ3cV" 
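Every assertion in test_extra follows the same derivation pattern: bip32_ckd(key, 2**31 + n) takes a hardened step, a small index takes a normal step, and bip32_privtopub switches to the public branch. A condensed sketch of that pattern for the hypothetical path m/0'/1, reusing the test's master seed:

    # Usage sketch under the same `from bitcoin import *` namespace the tests use.
    from bitcoin import bip32_master_key, bip32_ckd, bip32_privtopub, safe_from_hex

    master = bip32_master_key(safe_from_hex('000102030405060708090a0b0c0d0e0f'))
    node = bip32_ckd(bip32_ckd(master, 2**31), 1)  # m/0'/1: one hardened step, one normal step
    print(bip32_privtopub(node))                   # xpub... serialization of the public node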
+ + +class TestStartingAddressAndScriptGenerationConsistency(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("Starting address and script generation consistency tests") + + def test_all(self): + for i in range(5): + a = privtoaddr(random_key()) + self.assertEqual(a, script_to_address(address_to_script(a))) + self.assertEqual(a, script_to_address(address_to_script(a), 0)) + self.assertEqual(a, script_to_address(address_to_script(a), 0x00)) + + b = privtoaddr(random_key(), 5) + self.assertEqual(b, script_to_address(address_to_script(b))) + self.assertEqual(b, script_to_address(address_to_script(b), 0)) + self.assertEqual(b, script_to_address(address_to_script(b), 0x00)) + self.assertEqual(b, script_to_address(address_to_script(b), 5)) + self.assertEqual(b, script_to_address(address_to_script(b), 0x05)) + + + for i in range(5): + a = privtoaddr(random_key(), 0x6f) + self.assertEqual(a, script_to_address(address_to_script(a), 111)) + self.assertEqual(a, script_to_address(address_to_script(a), 0x6f)) + + b = privtoaddr(random_key(), 0xc4) + self.assertEqual(b, script_to_address(address_to_script(b), 111)) + self.assertEqual(b, script_to_address(address_to_script(b), 0x6f)) + self.assertEqual(b, script_to_address(address_to_script(b), 196)) + self.assertEqual(b, script_to_address(address_to_script(b), 0xc4)) + + +class TestRipeMD160PythonBackup(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Testing the pure python backup for ripemd160') + + def test_all(self): + strvec = [ + '', + 'The quick brown fox jumps over the lazy dog', + 'The quick brown fox jumps over the lazy cog', + 'Nobody inspects the spammish repetition' + ] + + target = [ + '9c1185a5c5e9fc54612808977ee8f548b2258d31', + '37f332f68db77bd9d7edd4969571ad671cf9dd3b', + '132072df690933835eb8b6ad0b77e7b6f14acad7', + 'cc4a5ce1b3df48aec5d22d1f16b894a0b894eccc' + ] + + hash160target = [ + 'b472a266d0bd89c13706a4132ccfb16f7c3b9fcb', + '0e3397b4abc7a382b3ea2365883c3c7ca5f07600', + '53e0dacac5249e46114f65cb1f30d156b14e0bdc', + '1c9b7b48049a8f98699bca22a5856c5ef571cd68' + ] + + for i, s in enumerate(strvec): + digest = ripemd.RIPEMD160(s).digest() + hash160digest = ripemd.RIPEMD160(bin_sha256(s)).digest() + self.assertEqual(bytes_to_hex_string(digest), target[i]) + self.assertEqual(bytes_to_hex_string(hash160digest), hash160target[i]) + self.assertEqual(bytes_to_hex_string(bin_hash160(from_string_to_bytes(s))), hash160target[i]) + self.assertEqual(hash160(from_string_to_bytes(s)), hash160target[i]) + + +class TestScriptVsAddressOutputs(unittest.TestCase): + + @classmethod + def setUpClass(cls): + print('Testing script vs address outputs') + + def test_all(self): + addr0 = '1Lqgj1ThNfwLgHMp5qJUerYsuUEm8vHmVG' + script0 = '76a914d99f84267d1f90f3e870a5e9d2399918140be61d88ac' + addr1 = '31oSGBBNrpCiENH3XMZpiP6GTC4tad4bMy' + script1 = 'a9140136d001619faba572df2ef3d193a57ad29122d987' + + inputs = [{ + 'output': 'cd6219ea108119dc62fce09698b649efde56eca7ce223a3315e8b431f6280ce7:0', + 'value': 158000 + }] + + outputs = [ + [{'address': addr0, 'value': 1000}, {'address': addr1, 'value': 2000}], + [{'script': script0, 'value': 1000}, {'address': addr1, 'value': 2000}], + [{'address': addr0, 'value': 1000}, {'script': script1, 'value': 2000}], + [{'script': script0, 'value': 1000}, {'script': script1, 'value': 2000}], + [addr0 + ':1000', addr1 + ':2000'], + [script0 + ':1000', addr1 + ':2000'], + [addr0 + ':1000', script1 + ':2000'], + [script0 + ':1000', script1 + ':2000'] + ] + + for outs in outputs: + tx_struct = 
deserialize(mktx(inputs, outs)) + self.assertEqual(tx_struct['outs'], outputs[3]) + + +class TestConversions(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.privkey_hex = ( + "e9873d79c6d87dc0fb6a5778633389f4453213303da61f20bd67fc233aa33262" + ) + cls.privkey_bin = ( + b"\xe9\x87=y\xc6\xd8}\xc0\xfbjWxc3\x89\xf4E2\x130=\xa6\x1f \xbdg\xfc#:\xa32b" + ) + + cls.pubkey_hex = ( + "04588d202afcc1ee4ab5254c7847ec25b9a135bbda0f2bc69ee1a714749fd77dc9f88ff2a00d7e752d44cbe16e1ebcf0890b76ec7c78886109dee76ccfc8445424" + ) + cls.pubkey_bin = ( + b"\x04X\x8d *\xfc\xc1\xeeJ\xb5%LxG\xec%\xb9\xa15\xbb\xda\x0f+\xc6\x9e\xe1\xa7\x14t\x9f\xd7}\xc9\xf8\x8f\xf2\xa0\r~u-D\xcb\xe1n\x1e\xbc\xf0\x89\x0bv\xec|x\x88a\t\xde\xe7l\xcf\xc8DT$" + ) + + def test_privkey_to_pubkey(self): + pubkey_hex = privkey_to_pubkey(self.privkey_hex) + self.assertEqual(pubkey_hex, self.pubkey_hex) + + def test_changebase(self): + self.assertEqual( + self.pubkey_bin, + changebase( + self.pubkey_hex, 16, 256, minlen=len(self.pubkey_bin) + ) + ) + + self.assertEqual( + self.pubkey_hex, + changebase( + self.pubkey_bin, 256, 16, minlen=len(self.pubkey_hex) + ) + ) + + self.assertEqual( + self.privkey_bin, + changebase( + self.privkey_hex, 16, 256, minlen=len(self.privkey_bin) + ) + ) + + self.assertEqual( + self.privkey_hex, + changebase( + self.privkey_bin, 256, 16, minlen=len(self.privkey_hex) + ) + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/src/lib/pybitcointools/test_stealth.py b/src/lib/pybitcointools/test_stealth.py new file mode 100644 index 00000000..01a1432d --- /dev/null +++ b/src/lib/pybitcointools/test_stealth.py @@ -0,0 +1,92 @@ +import bitcoin as bc +import sys +import unittest + +class TestStealth(unittest.TestCase): + + def setUp(self): + + if sys.getrecursionlimit() < 1000: + sys.setrecursionlimit(1000) + + self.addr = 'vJmtjxSDxNPXL4RNapp9ARdqKz3uJyf1EDGjr1Fgqs9c8mYsVH82h8wvnA4i5rtJ57mr3kor1EVJrd4e5upACJd588xe52yXtzumxj' + self.scan_pub = '025e58a31122b38c86abc119b9379fe247410aee87a533f9c07b189aef6c3c1f52' + self.scan_priv = '3e49e7257cb31db997edb1cf8299af0f37e2663e2260e4b8033e49d39a6d02f2' + self.spend_pub = '03616562c98e7d7b74be409a787cec3a912122f3fb331a9bee9b0b73ce7b9f50af' + self.spend_priv = 'aa3db0cfb3edc94de4d10f873f8190843f2a17484f6021a95a7742302c744748' + self.ephem_pub = '03403d306ec35238384c7e340393335f9bc9bb4a2e574eb4e419452c4ea19f14b0' + self.ephem_priv = '9e63abaf8dcd5ea3919e6de0b6c544e00bf51bf92496113a01d6e369944dc091' + self.shared_secret = 'a4047ee231f4121e3a99a3a3378542e34a384b865a9917789920e1f13ffd91c6' + self.pay_pub = '02726112ad39cb6bf848b1b1ef30b88e35286bf99f746c2be575f96c0e02a9357c' + self.pay_priv = '4e422fb1e5e1db6c1f6ab32a7706d368ceb385e7fab098e633c5c5949c3b97cd' + + self.testnet_addr = 'waPUuLLykSnY3itzf1AyrQZm42F7KyB7SR5zpfqmnzPXWhx9kXLzV3EcyqzDdpTwngiyCCMUqztS9S1d7XJs3JMt3MsHPDpBCudvx9' + + def test_address_encoding(self): + + sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.addr) + self.assertEqual(sc_pub, self.scan_pub) + self.assertEqual(sp_pub, self.spend_pub) + + stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub) + self.assertEqual(stealth_addr2, self.addr) + + magic_byte_testnet = 43 + sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.testnet_addr) + self.assertEqual(sc_pub, self.scan_pub) + self.assertEqual(sp_pub, self.spend_pub) + + stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub, magic_byte_testnet) + self.assertEqual(stealth_addr2, self.testnet_addr) + + def test_shared_secret(self): + + sh_sec = 
bc.shared_secret_sender(self.scan_pub, self.ephem_priv)
+        self.assertEqual(sh_sec, self.shared_secret)
+
+        sh_sec2 = bc.shared_secret_receiver(self.ephem_pub, self.scan_priv)
+        self.assertEqual(sh_sec2, self.shared_secret)
+
+    def test_uncover_pay_keys(self):
+
+        pub = bc.uncover_pay_pubkey_sender(self.scan_pub, self.spend_pub, self.ephem_priv)
+        pub2 = bc.uncover_pay_pubkey_receiver(self.scan_priv, self.spend_pub, self.ephem_pub)
+        self.assertEqual(pub, self.pay_pub)
+        self.assertEqual(pub2, self.pay_pub)
+
+        priv = bc.uncover_pay_privkey(self.scan_priv, self.spend_priv, self.ephem_pub)
+        self.assertEqual(priv, self.pay_priv)
+
+    def test_stealth_metadata_script(self):
+
+        nonce = int('deadbeef', 16)
+        script = bc.mk_stealth_metadata_script(self.ephem_pub, nonce)
+        self.assertEqual(script[6:], 'deadbeef' + self.ephem_pub)
+
+        eph_pub = bc.ephem_pubkey_from_tx_script(script)
+        self.assertEqual(eph_pub, self.ephem_pub)
+
+    def test_stealth_tx_outputs(self):
+
+        nonce = int('deadbeef', 16)
+        value = 10**8
+        outputs = bc.mk_stealth_tx_outputs(self.addr, value, self.ephem_priv, nonce)
+
+        self.assertEqual(outputs[0]['value'], 0)
+        self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
+        self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub))
+        self.assertEqual(outputs[1]['value'], value)
+
+        outputs = bc.mk_stealth_tx_outputs(self.testnet_addr, value, self.ephem_priv, nonce, 'testnet')
+
+        self.assertEqual(outputs[0]['value'], 0)
+        self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
+        self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub, 111))
+        self.assertEqual(outputs[1]['value'], value)
+
+        self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.testnet_addr, value, self.ephem_priv, nonce, 'btc')
+
+        self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.addr, value, self.ephem_priv, nonce, 'testnet')
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/COPYING b/src/lib/pyelliptic/LICENSE
similarity index 99%
rename from COPYING
rename to src/lib/pyelliptic/LICENSE
index f288702d..94a9ed02 100644
--- a/COPYING
+++ b/src/lib/pyelliptic/LICENSE
@@ -1,7 +1,7 @@
                     GNU GENERAL PUBLIC LICENSE
                        Version 3, 29 June 2007
 
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.
 
@@ -645,7 +645,7 @@ the "copyright" line and a pointer to where the full notice is found.
     GNU General Public License for more details.
 
     You should have received a copy of the GNU General Public License
-    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 Also add information on how to contact you by electronic and paper mail.
 
@@ -664,11 +664,11 @@ might be different; for a GUI interface, you would use an "about box".
 You should also get your employer (if you work as a programmer) or school,
 if any, to sign a "copyright disclaimer" for the program, if necessary.
 For more information on this, and how to apply and follow the GNU GPL, see
-<http://www.gnu.org/licenses/>.
+<https://www.gnu.org/licenses/>.
 
 The GNU General Public License does not permit incorporating your program
 into proprietary programs.  If your program is a subroutine library, you
 may consider it more useful to permit linking proprietary applications with
 the library.  If this is what you want to do, use the GNU Lesser General
 Public License instead of this License.  But first, please read
-<http://www.gnu.org/philosophy/why-not-lgpl.html>.
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
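The stealth-address tests above hinge on one symmetry: sender and receiver hold opposite halves of the key material yet derive the same ECDH secret, so only they can locate and spend the payment. A standalone sketch of that symmetry, reusing the fixtures from TestStealth.setUp (assumes the package imports as bitcoin):

    import bitcoin as bc

    # Key material copied from the TestStealth.setUp fixtures above
    scan_pub = '025e58a31122b38c86abc119b9379fe247410aee87a533f9c07b189aef6c3c1f52'
    scan_priv = '3e49e7257cb31db997edb1cf8299af0f37e2663e2260e4b8033e49d39a6d02f2'
    ephem_pub = '03403d306ec35238384c7e340393335f9bc9bb4a2e574eb4e419452c4ea19f14b0'
    ephem_priv = '9e63abaf8dcd5ea3919e6de0b6c544e00bf51bf92496113a01d6e369944dc091'

    # The sender knows (scan_pub, ephem_priv); the receiver knows
    # (ephem_pub, scan_priv). Both sides compute the same shared secret.
    assert bc.shared_secret_sender(scan_pub, ephem_priv) == \
           bc.shared_secret_receiver(ephem_pub, scan_priv)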
diff --git a/src/lib/pyelliptic/README.md b/src/lib/pyelliptic/README.md
new file mode 100644
index 00000000..587b1445
--- /dev/null
+++ b/src/lib/pyelliptic/README.md
@@ -0,0 +1,67 @@
+# PyElliptic
+
+PyElliptic is a high-level wrapper for the OpenSSL cryptographic library,
+released under the GNU General Public License.
+
+Python 3 compatible. For GNU/Linux and Windows.
+Requires OpenSSL.
+
+## Features
+
+### Asymmetric cryptography using Elliptic Curve Cryptography (ECC)
+
+* Key agreement: ECDH
+* Digital signatures: ECDSA
+* Hybrid encryption: ECIES (like RSA)
+
+### Symmetric cryptography
+
+* AES-128 (CBC, OFB, CFB)
+* AES-256 (CBC, OFB, CFB)
+* Blowfish (CFB and CBC)
+* RC4
+
+### Other
+
+* CSPRNG
+* HMAC (using SHA512)
+* PBKDF2 (SHA256 and SHA512)
+
+## Example
+
+```python
+#!/usr/bin/python
+
+import pyelliptic
+
+# Symmetric encryption
+iv = pyelliptic.Cipher.gen_IV('aes-256-cfb')
+ctx = pyelliptic.Cipher("secretkey", iv, 1, ciphername='aes-256-cfb')
+
+ciphertext = ctx.update('test1')
+ciphertext += ctx.update('test2')
+ciphertext += ctx.final()
+
+ctx2 = pyelliptic.Cipher("secretkey", iv, 0, ciphername='aes-256-cfb')
+print ctx2.ciphering(ciphertext)
+
+# Asymmetric encryption
+alice = pyelliptic.ECC() # default curve: sect283r1
+bob = pyelliptic.ECC(curve='sect571r1')
+
+ciphertext = alice.encrypt("Hello Bob", bob.get_pubkey())
+print bob.decrypt(ciphertext)
+
+signature = bob.sign("Hello Alice")
+# alice's job:
+print pyelliptic.ECC(pubkey=bob.get_pubkey()).verify(signature, "Hello Alice")
+
+# ERROR !!!
+try:
+    key = alice.get_ecdh_key(bob.get_pubkey())
+except:
+    print("For ECDH key agreement, the keys must be defined on the same curve!")
+
+alice = pyelliptic.ECC(curve='sect571r1')
+print alice.get_ecdh_key(bob.get_pubkey()).encode('hex')
+print bob.get_ecdh_key(alice.get_pubkey()).encode('hex')
+```
diff --git a/src/lib/pyelliptic/__init__.py b/src/lib/pyelliptic/__init__.py
new file mode 100644
index 00000000..761d08af
--- /dev/null
+++ b/src/lib/pyelliptic/__init__.py
@@ -0,0 +1,19 @@
+# Copyright (C) 2010
+# Author: Yann GUIBET
+# Contact: 
+
+__version__ = '1.3'
+
+__all__ = [
+    'OpenSSL',
+    'ECC',
+    'Cipher',
+    'hmac_sha256',
+    'hmac_sha512',
+    'pbkdf2'
+]
+
+from .openssl import OpenSSL
+from .ecc import ECC
+from .cipher import Cipher
+from .hash import hmac_sha256, hmac_sha512, pbkdf2
diff --git a/src/lib/pyelliptic/arithmetic.py b/src/lib/pyelliptic/arithmetic.py
new file mode 100644
index 00000000..1eec381a
--- /dev/null
+++ b/src/lib/pyelliptic/arithmetic.py
@@ -0,0 +1,106 @@
+import hashlib, re
+
+P = 2**256-2**32-2**9-2**8-2**7-2**6-2**4-1
+A = 0
+Gx = 55066263022277343669578718895168534326250603453777594175500187360389116729240
+Gy = 32670510020758816978083085130507043184471273380659243275938904335757337482424
+G = (Gx, Gy)
+
+def inv(a, n):
+    lm, hm = 1, 0
+    low, high = a % n, n
+    while low > 1:
+        r = high // low  # integer division; plain '/' breaks on Python 3
+        nm, new = hm-lm*r, high-low*r
+        lm, low, hm, high = nm, new, lm, low
+    return lm % n
+
+def get_code_string(base):
+    if base == 2: return '01'
+    elif base == 10: return '0123456789'
+    elif base == 16: return "0123456789abcdef"
+    elif base == 58: return "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
+    elif base == 256: return ''.join([chr(x) for x in range(256)])
+    else: raise ValueError("Invalid base!")
+
+def encode(val, base, minlen=0):
+    code_string = get_code_string(base)
+    result = ""
+    while val > 0:
+        result = code_string[val % base] + result
+        val //= base
+    if len(result) < minlen:
+        result = code_string[0]*(minlen-len(result))+result
return result + +def decode(string,base): + code_string = get_code_string(base) + result = 0 + if base == 16: string = string.lower() + while len(string) > 0: + result *= base + result += code_string.find(string[0]) + string = string[1:] + return result + +def changebase(string,frm,to,minlen=0): + return encode(decode(string,frm),to,minlen) + +def base10_add(a,b): + if a == None: return b[0],b[1] + if b == None: return a[0],a[1] + if a[0] == b[0]: + if a[1] == b[1]: return base10_double(a[0],a[1]) + else: return None + m = ((b[1]-a[1]) * inv(b[0]-a[0],P)) % P + x = (m*m-a[0]-b[0]) % P + y = (m*(a[0]-x)-a[1]) % P + return (x,y) + +def base10_double(a): + if a == None: return None + m = ((3*a[0]*a[0]+A)*inv(2*a[1],P)) % P + x = (m*m-2*a[0]) % P + y = (m*(a[0]-x)-a[1]) % P + return (x,y) + +def base10_multiply(a,n): + if n == 0: return G + if n == 1: return a + if (n%2) == 0: return base10_double(base10_multiply(a,n/2)) + if (n%2) == 1: return base10_add(base10_double(base10_multiply(a,n/2)),a) + +def hex_to_point(h): return (decode(h[2:66],16),decode(h[66:],16)) + +def point_to_hex(p): return '04'+encode(p[0],16,64)+encode(p[1],16,64) + +def multiply(privkey,pubkey): + return point_to_hex(base10_multiply(hex_to_point(pubkey),decode(privkey,16))) + +def privtopub(privkey): + return point_to_hex(base10_multiply(G,decode(privkey,16))) + +def add(p1,p2): + if (len(p1)==32): + return encode(decode(p1,16) + decode(p2,16) % P,16,32) + else: + return point_to_hex(base10_add(hex_to_point(p1),hex_to_point(p2))) + +def hash_160(string): + intermed = hashlib.sha256(string).digest() + ripemd160 = hashlib.new('ripemd160') + ripemd160.update(intermed) + return ripemd160.digest() + +def dbl_sha256(string): + return hashlib.sha256(hashlib.sha256(string).digest()).digest() + +def bin_to_b58check(inp): + inp_fmtd = '\x00' + inp + leadingzbytes = len(re.match('^\x00*',inp_fmtd).group(0)) + checksum = dbl_sha256(inp_fmtd)[:4] + return '1' * leadingzbytes + changebase(inp_fmtd+checksum,256,58) + +#Convert a public key (in hex) to a Bitcoin address +def pubkey_to_address(pubkey): + return bin_to_b58check(hash_160(changebase(pubkey,16,256))) diff --git a/src/lib/pyelliptic/cipher.py b/src/lib/pyelliptic/cipher.py new file mode 100644 index 00000000..b597cafa --- /dev/null +++ b/src/lib/pyelliptic/cipher.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. 
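Before the OpenSSL-backed `Cipher` wrapper below, a minimal sketch of how the pure-Python secp256k1 helpers in `arithmetic.py` above compose (the module's `/` integer division makes it Python 2 only, and the private key here is an arbitrary placeholder):

```python
from pyelliptic import arithmetic

priv = "a1" * 32  # placeholder 64-hex-digit exponent, not a real secret
pub = arithmetic.privtopub(priv)          # '04' + X + Y, hex encoded
addr = arithmetic.pubkey_to_address(pub)  # base58check address, '1...'

# privtopub() is just scalar multiplication of the generator point G:
assert pub == arithmetic.multiply(priv, arithmetic.point_to_hex(arithmetic.G))
```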
+ +from pyelliptic.openssl import OpenSSL + + +class Cipher: + """ + Symmetric encryption + + import pyelliptic + iv = pyelliptic.Cipher.gen_IV('aes-256-cfb') + ctx = pyelliptic.Cipher("secretkey", iv, 1, ciphername='aes-256-cfb') + ciphertext = ctx.update('test1') + ciphertext += ctx.update('test2') + ciphertext += ctx.final() + + ctx2 = pyelliptic.Cipher("secretkey", iv, 0, ciphername='aes-256-cfb') + print ctx2.ciphering(ciphertext) + """ + def __init__(self, key, iv, do, ciphername='aes-256-cbc'): + """ + do == 1 => Encrypt; do == 0 => Decrypt + """ + self.cipher = OpenSSL.get_cipher(ciphername) + self.ctx = OpenSSL.EVP_CIPHER_CTX_new() + if do == 1 or do == 0: + k = OpenSSL.malloc(key, len(key)) + IV = OpenSSL.malloc(iv, len(iv)) + OpenSSL.EVP_CipherInit_ex( + self.ctx, self.cipher.get_pointer(), 0, k, IV, do) + else: + raise Exception("RTFM ...") + + @staticmethod + def get_all_cipher(): + """ + static method, returns all ciphers available + """ + return OpenSSL.cipher_algo.keys() + + @staticmethod + def get_blocksize(ciphername): + cipher = OpenSSL.get_cipher(ciphername) + return cipher.get_blocksize() + + @staticmethod + def gen_IV(ciphername): + cipher = OpenSSL.get_cipher(ciphername) + return OpenSSL.rand(cipher.get_blocksize()) + + def update(self, input): + i = OpenSSL.c_int(0) + buffer = OpenSSL.malloc(b"", len(input) + self.cipher.get_blocksize()) + inp = OpenSSL.malloc(input, len(input)) + if OpenSSL.EVP_CipherUpdate(self.ctx, OpenSSL.byref(buffer), + OpenSSL.byref(i), inp, len(input)) == 0: + raise Exception("[OpenSSL] EVP_CipherUpdate FAIL ...") + return buffer.raw[0:i.value] + + def final(self): + i = OpenSSL.c_int(0) + buffer = OpenSSL.malloc(b"", self.cipher.get_blocksize()) + if (OpenSSL.EVP_CipherFinal_ex(self.ctx, OpenSSL.byref(buffer), + OpenSSL.byref(i))) == 0: + raise Exception("[OpenSSL] EVP_CipherFinal_ex FAIL ...") + return buffer.raw[0:i.value] + + def ciphering(self, input): + """ + Do update and final in one method + """ + buff = self.update(input) + return buff + self.final() + + def __del__(self): + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_CIPHER_CTX_reset(self.ctx) + else: + OpenSSL.EVP_CIPHER_CTX_cleanup(self.ctx) + OpenSSL.EVP_CIPHER_CTX_free(self.ctx) diff --git a/src/lib/pyelliptic/ecc.py b/src/lib/pyelliptic/ecc.py new file mode 100644 index 00000000..bea645db --- /dev/null +++ b/src/lib/pyelliptic/ecc.py @@ -0,0 +1,480 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. + +from hashlib import sha512 +from pyelliptic.openssl import OpenSSL +from pyelliptic.cipher import Cipher +from pyelliptic.hash import hmac_sha256, equals +from struct import pack, unpack + + +class ECC: + """ + Asymmetric encryption with Elliptic Curve Cryptography (ECC) + ECDH, ECDSA and ECIES + + import pyelliptic + + alice = pyelliptic.ECC() # default curve: sect283r1 + bob = pyelliptic.ECC(curve='sect571r1') + + ciphertext = alice.encrypt("Hello Bob", bob.get_pubkey()) + print bob.decrypt(ciphertext) + + signature = bob.sign("Hello Alice") + # alice's job : + print pyelliptic.ECC( + pubkey=bob.get_pubkey()).verify(signature, "Hello Alice") + + # ERROR !!! 
+ try: + key = alice.get_ecdh_key(bob.get_pubkey()) + except: print("For ECDH key agreement,\ + the keys must be defined on the same curve !") + + alice = pyelliptic.ECC(curve='sect571r1') + print alice.get_ecdh_key(bob.get_pubkey()).encode('hex') + print bob.get_ecdh_key(alice.get_pubkey()).encode('hex') + + """ + def __init__(self, pubkey=None, privkey=None, pubkey_x=None, + pubkey_y=None, raw_privkey=None, curve='sect283r1'): + """ + For a normal and High level use, specifie pubkey, + privkey (if you need) and the curve + """ + if type(curve) == str: + self.curve = OpenSSL.get_curve(curve) + else: + self.curve = curve + + if pubkey_x is not None and pubkey_y is not None: + self._set_keys(pubkey_x, pubkey_y, raw_privkey) + elif pubkey is not None: + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + if privkey is not None: + curve2, raw_privkey, i = ECC._decode_privkey(privkey) + if curve != curve2: + raise Exception("Bad ECC keys ...") + self.curve = curve + self._set_keys(pubkey_x, pubkey_y, raw_privkey) + else: + self.privkey, self.pubkey_x, self.pubkey_y = self._generate() + + def _set_keys(self, pubkey_x, pubkey_y, privkey): + if self.raw_check_key(privkey, pubkey_x, pubkey_y) < 0: + self.pubkey_x = None + self.pubkey_y = None + self.privkey = None + raise Exception("Bad ECC keys ...") + else: + self.pubkey_x = pubkey_x + self.pubkey_y = pubkey_y + self.privkey = privkey + + @staticmethod + def get_curves(): + """ + static method, returns the list of all the curves available + """ + return OpenSSL.curves.keys() + + def get_curve(self): + return OpenSSL.get_curve_by_id(self.curve) + + def get_curve_id(self): + return self.curve + + def get_pubkey(self): + """ + High level function which returns : + curve(2) + len_of_pubkeyX(2) + pubkeyX + len_of_pubkeyY + pubkeyY + """ + return b''.join((pack('!H', self.curve), + pack('!H', len(self.pubkey_x)), + self.pubkey_x, + pack('!H', len(self.pubkey_y)), + self.pubkey_y + )) + + def get_privkey(self): + """ + High level function which returns + curve(2) + len_of_privkey(2) + privkey + """ + return b''.join((pack('!H', self.curve), + pack('!H', len(self.privkey)), + self.privkey + )) + + @staticmethod + def _decode_pubkey(pubkey): + i = 0 + curve = unpack('!H', pubkey[i:i + 2])[0] + i += 2 + tmplen = unpack('!H', pubkey[i:i + 2])[0] + i += 2 + pubkey_x = pubkey[i:i + tmplen] + i += tmplen + tmplen = unpack('!H', pubkey[i:i + 2])[0] + i += 2 + pubkey_y = pubkey[i:i + tmplen] + i += tmplen + return curve, pubkey_x, pubkey_y, i + + @staticmethod + def _decode_privkey(privkey): + i = 0 + curve = unpack('!H', privkey[i:i + 2])[0] + i += 2 + tmplen = unpack('!H', privkey[i:i + 2])[0] + i += 2 + privkey = privkey[i:i + tmplen] + i += tmplen + return curve, privkey, i + + def _generate(self): + try: + pub_key_x = OpenSSL.BN_new() + pub_key_y = OpenSSL.BN_new() + + key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + if (OpenSSL.EC_KEY_generate_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_generate_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + priv_key = OpenSSL.EC_KEY_get0_private_key(key) + + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_KEY_get0_public_key(key) + + if (OpenSSL.EC_POINT_get_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, 0 + )) == 0: + raise Exception( + "[OpenSSL] EC_POINT_get_affine_coordinates_GFp FAIL ...") + + privkey = 
OpenSSL.malloc(0, OpenSSL.BN_num_bytes(priv_key)) + pubkeyx = OpenSSL.malloc(0, OpenSSL.BN_num_bytes(pub_key_x)) + pubkeyy = OpenSSL.malloc(0, OpenSSL.BN_num_bytes(pub_key_y)) + OpenSSL.BN_bn2bin(priv_key, privkey) + privkey = privkey.raw + OpenSSL.BN_bn2bin(pub_key_x, pubkeyx) + pubkeyx = pubkeyx.raw + OpenSSL.BN_bn2bin(pub_key_y, pubkeyy) + pubkeyy = pubkeyy.raw + self.raw_check_key(privkey, pubkeyx, pubkeyy) + + return privkey, pubkeyx, pubkeyy + + finally: + OpenSSL.EC_KEY_free(key) + OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + + def get_ecdh_key(self, pubkey): + """ + High level function. Compute public key with the local private key + and returns a 512bits shared key + """ + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + if curve != self.curve: + raise Exception("ECC keys must be from the same curve !") + return sha512(self.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + + def raw_get_ecdh_key(self, pubkey_x, pubkey_y): + try: + ecdh_keybuffer = OpenSSL.malloc(0, 32) + + other_key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if other_key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + + other_pub_key_x = OpenSSL.BN_bin2bn(pubkey_x, len(pubkey_x), 0) + other_pub_key_y = OpenSSL.BN_bin2bn(pubkey_y, len(pubkey_y), 0) + + other_group = OpenSSL.EC_KEY_get0_group(other_key) + other_pub_key = OpenSSL.EC_POINT_new(other_group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(other_group, + other_pub_key, + other_pub_key_x, + other_pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(other_key, other_pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(other_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + + own_key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if own_key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + own_priv_key = OpenSSL.BN_bin2bn( + self.privkey, len(self.privkey), 0) + + if (OpenSSL.EC_KEY_set_private_key(own_key, own_priv_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...") + + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EC_KEY_set_method(own_key, OpenSSL.EC_KEY_OpenSSL()) + else: + OpenSSL.ECDH_set_method(own_key, OpenSSL.ECDH_OpenSSL()) + ecdh_keylen = OpenSSL.ECDH_compute_key( + ecdh_keybuffer, 32, other_pub_key, own_key, 0) + + if ecdh_keylen != 32: + raise Exception("[OpenSSL] ECDH keylen FAIL ...") + + return ecdh_keybuffer.raw + + finally: + OpenSSL.EC_KEY_free(other_key) + OpenSSL.BN_free(other_pub_key_x) + OpenSSL.BN_free(other_pub_key_y) + OpenSSL.EC_POINT_free(other_pub_key) + OpenSSL.EC_KEY_free(own_key) + OpenSSL.BN_free(own_priv_key) + + def check_key(self, privkey, pubkey): + """ + Check the public key and the private key. 
+ The private key is optional (replace by None) + """ + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + if privkey is None: + raw_privkey = None + curve2 = curve + else: + curve2, raw_privkey, i = ECC._decode_privkey(privkey) + if curve != curve2: + raise Exception("Bad public and private key") + return self.raw_check_key(raw_privkey, pubkey_x, pubkey_y, curve) + + def raw_check_key(self, privkey, pubkey_x, pubkey_y, curve=None): + if curve is None: + curve = self.curve + elif type(curve) == str: + curve = OpenSSL.get_curve(curve) + else: + curve = curve + try: + key = OpenSSL.EC_KEY_new_by_curve_name(curve) + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + if privkey is not None: + priv_key = OpenSSL.BN_bin2bn(privkey, len(privkey), 0) + pub_key_x = OpenSSL.BN_bin2bn(pubkey_x, len(pubkey_x), 0) + pub_key_y = OpenSSL.BN_bin2bn(pubkey_y, len(pubkey_y), 0) + + if privkey is not None: + if (OpenSSL.EC_KEY_set_private_key(key, priv_key)) == 0: + raise Exception( + "[OpenSSL] EC_KEY_set_private_key FAIL ...") + + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_POINT_new(group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(key, pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + return 0 + + finally: + OpenSSL.EC_KEY_free(key) + OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + OpenSSL.EC_POINT_free(pub_key) + if privkey is not None: + OpenSSL.BN_free(priv_key) + + def sign(self, inputb, digest_alg=OpenSSL.digest_ecdsa_sha1): + """ + Sign the input with ECDSA method and returns the signature + """ + try: + size = len(inputb) + buff = OpenSSL.malloc(inputb, size) + digest = OpenSSL.malloc(0, 64) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + md_ctx = OpenSSL.EVP_MD_CTX_new() + else: + md_ctx = OpenSSL.EVP_MD_CTX_create() + dgst_len = OpenSSL.pointer(OpenSSL.c_int(0)) + siglen = OpenSSL.pointer(OpenSSL.c_int(0)) + sig = OpenSSL.malloc(0, 151) + + key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + + priv_key = OpenSSL.BN_bin2bn(self.privkey, len(self.privkey), 0) + pub_key_x = OpenSSL.BN_bin2bn(self.pubkey_x, len(self.pubkey_x), 0) + pub_key_y = OpenSSL.BN_bin2bn(self.pubkey_y, len(self.pubkey_y), 0) + + if (OpenSSL.EC_KEY_set_private_key(key, priv_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...") + + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_POINT_new(group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(key, pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_new(md_ctx) + else: + OpenSSL.EVP_MD_CTX_init(md_ctx) + OpenSSL.EVP_DigestInit_ex(md_ctx, digest_alg(), None) + + if (OpenSSL.EVP_DigestUpdate(md_ctx, buff, size)) == 0: + raise Exception("[OpenSSL] EVP_DigestUpdate FAIL ...") + 
OpenSSL.EVP_DigestFinal_ex(md_ctx, digest, dgst_len) + OpenSSL.ECDSA_sign(0, digest, dgst_len.contents, sig, siglen, key) + if (OpenSSL.ECDSA_verify(0, digest, dgst_len.contents, sig, + siglen.contents, key)) != 1: + raise Exception("[OpenSSL] ECDSA_verify FAIL ...") + + return sig.raw[:siglen.contents.value] + + finally: + OpenSSL.EC_KEY_free(key) + OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + OpenSSL.BN_free(priv_key) + OpenSSL.EC_POINT_free(pub_key) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_free(md_ctx) + else: + OpenSSL.EVP_MD_CTX_destroy(md_ctx) + pass + + def verify(self, sig, inputb, digest_alg=OpenSSL.digest_ecdsa_sha1): + """ + Verify the signature with the input and the local public key. + Returns a boolean + """ + try: + bsig = OpenSSL.malloc(sig, len(sig)) + binputb = OpenSSL.malloc(inputb, len(inputb)) + digest = OpenSSL.malloc(0, 64) + dgst_len = OpenSSL.pointer(OpenSSL.c_int(0)) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + md_ctx = OpenSSL.EVP_MD_CTX_new() + else: + md_ctx = OpenSSL.EVP_MD_CTX_create() + key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) + + if key == 0: + raise Exception("[OpenSSL] EC_KEY_new_by_curve_name FAIL ...") + + pub_key_x = OpenSSL.BN_bin2bn(self.pubkey_x, len(self.pubkey_x), 0) + pub_key_y = OpenSSL.BN_bin2bn(self.pubkey_y, len(self.pubkey_y), 0) + group = OpenSSL.EC_KEY_get0_group(key) + pub_key = OpenSSL.EC_POINT_new(group) + + if (OpenSSL.EC_POINT_set_affine_coordinates_GFp(group, pub_key, + pub_key_x, + pub_key_y, + 0)) == 0: + raise Exception( + "[OpenSSL] EC_POINT_set_affine_coordinates_GFp FAIL ...") + if (OpenSSL.EC_KEY_set_public_key(key, pub_key)) == 0: + raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") + if (OpenSSL.EC_KEY_check_key(key)) == 0: + raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_new(md_ctx) + else: + OpenSSL.EVP_MD_CTX_init(md_ctx) + OpenSSL.EVP_DigestInit_ex(md_ctx, digest_alg(), None) + if (OpenSSL.EVP_DigestUpdate(md_ctx, binputb, len(inputb))) == 0: + raise Exception("[OpenSSL] EVP_DigestUpdate FAIL ...") + + OpenSSL.EVP_DigestFinal_ex(md_ctx, digest, dgst_len) + ret = OpenSSL.ECDSA_verify( + 0, digest, dgst_len.contents, bsig, len(sig), key) + + if ret == -1: + return False # Fail to Check + else: + if ret == 0: + return False # Bad signature ! + else: + return True # Good + return False + + finally: + OpenSSL.EC_KEY_free(key) + OpenSSL.BN_free(pub_key_x) + OpenSSL.BN_free(pub_key_y) + OpenSSL.EC_POINT_free(pub_key) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_free(md_ctx) + else: + OpenSSL.EVP_MD_CTX_destroy(md_ctx) + + @staticmethod + def encrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'): + """ + Encrypt data with ECIES method using the public key of the recipient. 
+ """ + curve, pubkey_x, pubkey_y, i = ECC._decode_pubkey(pubkey) + return ECC.raw_encrypt(data, pubkey_x, pubkey_y, curve=curve, + ephemcurve=ephemcurve, ciphername=ciphername) + + @staticmethod + def raw_encrypt(data, pubkey_x, pubkey_y, curve='sect283r1', + ephemcurve=None, ciphername='aes-256-cbc'): + if ephemcurve is None: + ephemcurve = curve + ephem = ECC(curve=ephemcurve) + key = sha512(ephem.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + key_e, key_m = key[:32], key[32:] + pubkey = ephem.get_pubkey() + iv = OpenSSL.rand(OpenSSL.get_cipher(ciphername).get_blocksize()) + ctx = Cipher(key_e, iv, 1, ciphername) + ciphertext = iv + pubkey + ctx.ciphering(data) + mac = hmac_sha256(key_m, ciphertext) + return ciphertext + mac + + def decrypt(self, data, ciphername='aes-256-cbc'): + """ + Decrypt data with ECIES method using the local private key + """ + blocksize = OpenSSL.get_cipher(ciphername).get_blocksize() + iv = data[:blocksize] + i = blocksize + curve, pubkey_x, pubkey_y, i2 = ECC._decode_pubkey(data[i:]) + i += i2 + ciphertext = data[i:len(data)-32] + i += len(ciphertext) + mac = data[i:] + key = sha512(self.raw_get_ecdh_key(pubkey_x, pubkey_y)).digest() + key_e, key_m = key[:32], key[32:] + if not equals(hmac_sha256(key_m, data[:len(data) - 32]), mac): + raise RuntimeError("Fail to verify data") + ctx = Cipher(key_e, iv, 0, ciphername) + return ctx.ciphering(ciphertext) diff --git a/src/lib/pyelliptic/hash.py b/src/lib/pyelliptic/hash.py new file mode 100644 index 00000000..fb910dd4 --- /dev/null +++ b/src/lib/pyelliptic/hash.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. + +from pyelliptic.openssl import OpenSSL + + +# For python3 +def _equals_bytes(a, b): + if len(a) != len(b): + return False + result = 0 + for x, y in zip(a, b): + result |= x ^ y + return result == 0 + + +def _equals_str(a, b): + if len(a) != len(b): + return False + result = 0 + for x, y in zip(a, b): + result |= ord(x) ^ ord(y) + return result == 0 + + +def equals(a, b): + if isinstance(a, str): + return _equals_str(a, b) + else: + return _equals_bytes(a, b) + + +def hmac_sha256(k, m): + """ + Compute the key and the message with HMAC SHA5256 + """ + key = OpenSSL.malloc(k, len(k)) + d = OpenSSL.malloc(m, len(m)) + md = OpenSSL.malloc(0, 32) + i = OpenSSL.pointer(OpenSSL.c_int(0)) + OpenSSL.HMAC(OpenSSL.EVP_sha256(), key, len(k), d, len(m), md, i) + return md.raw + + +def hmac_sha512(k, m): + """ + Compute the key and the message with HMAC SHA512 + """ + key = OpenSSL.malloc(k, len(k)) + d = OpenSSL.malloc(m, len(m)) + md = OpenSSL.malloc(0, 64) + i = OpenSSL.pointer(OpenSSL.c_int(0)) + OpenSSL.HMAC(OpenSSL.EVP_sha512(), key, len(k), d, len(m), md, i) + return md.raw + + +def pbkdf2(password, salt=None, i=10000, keylen=64): + if salt is None: + salt = OpenSSL.rand(8) + p_password = OpenSSL.malloc(password, len(password)) + p_salt = OpenSSL.malloc(salt, len(salt)) + output = OpenSSL.malloc(0, keylen) + OpenSSL.PKCS5_PBKDF2_HMAC(p_password, len(password), p_salt, + len(p_salt), i, OpenSSL.EVP_sha256(), + keylen, output) + return salt, output.raw diff --git a/src/lib/pyelliptic/openssl.py b/src/lib/pyelliptic/openssl.py new file mode 100644 index 00000000..ea8136dd --- /dev/null +++ b/src/lib/pyelliptic/openssl.py @@ -0,0 +1,516 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (C) 2011 Yann GUIBET +# See LICENSE for details. 
+# +# Software slightly changed by Jonathan Warren + +import sys +import ctypes + +OpenSSL = None + + +class CipherName: + def __init__(self, name, pointer, blocksize): + self._name = name + self._pointer = pointer + self._blocksize = blocksize + + def __str__(self): + return "Cipher : " + self._name + " | Blocksize : " + str(self._blocksize) + " | Fonction pointer : " + str(self._pointer) + + def get_pointer(self): + return self._pointer() + + def get_name(self): + return self._name + + def get_blocksize(self): + return self._blocksize + + +def get_version(library): + version = None + hexversion = None + cflags = None + try: + #OpenSSL 1.1 + OPENSSL_VERSION = 0 + OPENSSL_CFLAGS = 1 + library.OpenSSL_version.argtypes = [ctypes.c_int] + library.OpenSSL_version.restype = ctypes.c_char_p + version = library.OpenSSL_version(OPENSSL_VERSION) + cflags = library.OpenSSL_version(OPENSSL_CFLAGS) + library.OpenSSL_version_num.restype = ctypes.c_long + hexversion = library.OpenSSL_version_num() + except AttributeError: + try: + #OpenSSL 1.0 + SSLEAY_VERSION = 0 + SSLEAY_CFLAGS = 2 + library.SSLeay.restype = ctypes.c_long + library.SSLeay_version.restype = ctypes.c_char_p + library.SSLeay_version.argtypes = [ctypes.c_int] + version = library.SSLeay_version(SSLEAY_VERSION) + cflags = library.SSLeay_version(SSLEAY_CFLAGS) + hexversion = library.SSLeay() + except AttributeError: + #raise NotImplementedError('Cannot determine version of this OpenSSL library.') + pass + return (version, hexversion, cflags) + + +class _OpenSSL: + """ + Wrapper for OpenSSL using ctypes + """ + def __init__(self, library): + """ + Build the wrapper + """ + self._lib = ctypes.CDLL(library) + self._version, self._hexversion, self._cflags = get_version(self._lib) + self._libreSSL = self._version.startswith("LibreSSL") + + self.pointer = ctypes.pointer + self.c_int = ctypes.c_int + self.byref = ctypes.byref + self.create_string_buffer = ctypes.create_string_buffer + + self.BN_new = self._lib.BN_new + self.BN_new.restype = ctypes.c_void_p + self.BN_new.argtypes = [] + + self.BN_free = self._lib.BN_free + self.BN_free.restype = None + self.BN_free.argtypes = [ctypes.c_void_p] + + self.BN_num_bits = self._lib.BN_num_bits + self.BN_num_bits.restype = ctypes.c_int + self.BN_num_bits.argtypes = [ctypes.c_void_p] + + self.BN_bn2bin = self._lib.BN_bn2bin + self.BN_bn2bin.restype = ctypes.c_int + self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_bin2bn = self._lib.BN_bin2bn + self.BN_bin2bn.restype = ctypes.c_void_p + self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p] + + self.EC_KEY_free = self._lib.EC_KEY_free + self.EC_KEY_free.restype = None + self.EC_KEY_free.argtypes = [ctypes.c_void_p] + + self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name + self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p + self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int] + + self.EC_KEY_generate_key = self._lib.EC_KEY_generate_key + self.EC_KEY_generate_key.restype = ctypes.c_int + self.EC_KEY_generate_key.argtypes = [ctypes.c_void_p] + + self.EC_KEY_check_key = self._lib.EC_KEY_check_key + self.EC_KEY_check_key.restype = ctypes.c_int + self.EC_KEY_check_key.argtypes = [ctypes.c_void_p] + + self.EC_KEY_get0_private_key = self._lib.EC_KEY_get0_private_key + self.EC_KEY_get0_private_key.restype = ctypes.c_void_p + self.EC_KEY_get0_private_key.argtypes = [ctypes.c_void_p] + + self.EC_KEY_get0_public_key = self._lib.EC_KEY_get0_public_key + self.EC_KEY_get0_public_key.restype = 
ctypes.c_void_p + self.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p] + + self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group + self.EC_KEY_get0_group.restype = ctypes.c_void_p + self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p] + + self.EC_POINT_get_affine_coordinates_GFp = self._lib.EC_POINT_get_affine_coordinates_GFp + self.EC_POINT_get_affine_coordinates_GFp.restype = ctypes.c_int + self.EC_POINT_get_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key + self.EC_KEY_set_public_key.restype = ctypes.c_int + self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + self.EC_KEY_set_group = self._lib.EC_KEY_set_group + self.EC_KEY_set_group.restype = ctypes.c_int + self.EC_KEY_set_group.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.EC_POINT_set_affine_coordinates_GFp = self._lib.EC_POINT_set_affine_coordinates_GFp + self.EC_POINT_set_affine_coordinates_GFp.restype = ctypes.c_int + self.EC_POINT_set_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_POINT_new = self._lib.EC_POINT_new + self.EC_POINT_new.restype = ctypes.c_void_p + self.EC_POINT_new.argtypes = [ctypes.c_void_p] + + self.EC_POINT_free = self._lib.EC_POINT_free + self.EC_POINT_free.restype = None + self.EC_POINT_free.argtypes = [ctypes.c_void_p] + + self.BN_CTX_free = self._lib.BN_CTX_free + self.BN_CTX_free.restype = None + self.BN_CTX_free.argtypes = [ctypes.c_void_p] + + self.EC_POINT_mul = self._lib.EC_POINT_mul + self.EC_POINT_mul.restype = None + self.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key + self.EC_KEY_set_private_key.restype = ctypes.c_int + self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, + ctypes.c_void_p] + + if self._hexversion >= 0x10100000 and not self._libreSSL: + self.EC_KEY_OpenSSL = self._lib.EC_KEY_OpenSSL + self._lib.EC_KEY_OpenSSL.restype = ctypes.c_void_p + self._lib.EC_KEY_OpenSSL.argtypes = [] + + self.EC_KEY_set_method = self._lib.EC_KEY_set_method + self._lib.EC_KEY_set_method.restype = ctypes.c_int + self._lib.EC_KEY_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + else: + self.ECDH_OpenSSL = self._lib.ECDH_OpenSSL + self._lib.ECDH_OpenSSL.restype = ctypes.c_void_p + self._lib.ECDH_OpenSSL.argtypes = [] + + self.ECDH_set_method = self._lib.ECDH_set_method + self._lib.ECDH_set_method.restype = ctypes.c_int + self._lib.ECDH_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.BN_CTX_new = self._lib.BN_CTX_new + self._lib.BN_CTX_new.restype = ctypes.c_void_p + self._lib.BN_CTX_new.argtypes = [] + + self.ECDH_compute_key = self._lib.ECDH_compute_key + self.ECDH_compute_key.restype = ctypes.c_int + self.ECDH_compute_key.argtypes = [ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_CipherInit_ex = self._lib.EVP_CipherInit_ex + self.EVP_CipherInit_ex.restype = ctypes.c_int + self.EVP_CipherInit_ex.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_CIPHER_CTX_new = self._lib.EVP_CIPHER_CTX_new + self.EVP_CIPHER_CTX_new.restype = ctypes.c_void_p + 
self.EVP_CIPHER_CTX_new.argtypes = [] + + # Cipher + self.EVP_aes_128_cfb128 = self._lib.EVP_aes_128_cfb128 + self.EVP_aes_128_cfb128.restype = ctypes.c_void_p + self.EVP_aes_128_cfb128.argtypes = [] + + self.EVP_aes_256_cfb128 = self._lib.EVP_aes_256_cfb128 + self.EVP_aes_256_cfb128.restype = ctypes.c_void_p + self.EVP_aes_256_cfb128.argtypes = [] + + self.EVP_aes_128_cbc = self._lib.EVP_aes_128_cbc + self.EVP_aes_128_cbc.restype = ctypes.c_void_p + self.EVP_aes_128_cbc.argtypes = [] + + self.EVP_aes_256_cbc = self._lib.EVP_aes_256_cbc + self.EVP_aes_256_cbc.restype = ctypes.c_void_p + self.EVP_aes_256_cbc.argtypes = [] + + #self.EVP_aes_128_ctr = self._lib.EVP_aes_128_ctr + #self.EVP_aes_128_ctr.restype = ctypes.c_void_p + #self.EVP_aes_128_ctr.argtypes = [] + + #self.EVP_aes_256_ctr = self._lib.EVP_aes_256_ctr + #self.EVP_aes_256_ctr.restype = ctypes.c_void_p + #self.EVP_aes_256_ctr.argtypes = [] + + self.EVP_aes_128_ofb = self._lib.EVP_aes_128_ofb + self.EVP_aes_128_ofb.restype = ctypes.c_void_p + self.EVP_aes_128_ofb.argtypes = [] + + self.EVP_aes_256_ofb = self._lib.EVP_aes_256_ofb + self.EVP_aes_256_ofb.restype = ctypes.c_void_p + self.EVP_aes_256_ofb.argtypes = [] + + self.EVP_bf_cbc = self._lib.EVP_bf_cbc + self.EVP_bf_cbc.restype = ctypes.c_void_p + self.EVP_bf_cbc.argtypes = [] + + self.EVP_bf_cfb64 = self._lib.EVP_bf_cfb64 + self.EVP_bf_cfb64.restype = ctypes.c_void_p + self.EVP_bf_cfb64.argtypes = [] + + self.EVP_rc4 = self._lib.EVP_rc4 + self.EVP_rc4.restype = ctypes.c_void_p + self.EVP_rc4.argtypes = [] + + if self._hexversion >= 0x10100000 and not self._libreSSL: + self.EVP_CIPHER_CTX_reset = self._lib.EVP_CIPHER_CTX_reset + self.EVP_CIPHER_CTX_reset.restype = ctypes.c_int + self.EVP_CIPHER_CTX_reset.argtypes = [ctypes.c_void_p] + else: + self.EVP_CIPHER_CTX_cleanup = self._lib.EVP_CIPHER_CTX_cleanup + self.EVP_CIPHER_CTX_cleanup.restype = ctypes.c_int + self.EVP_CIPHER_CTX_cleanup.argtypes = [ctypes.c_void_p] + + self.EVP_CIPHER_CTX_free = self._lib.EVP_CIPHER_CTX_free + self.EVP_CIPHER_CTX_free.restype = None + self.EVP_CIPHER_CTX_free.argtypes = [ctypes.c_void_p] + + self.EVP_CipherUpdate = self._lib.EVP_CipherUpdate + self.EVP_CipherUpdate.restype = ctypes.c_int + self.EVP_CipherUpdate.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int] + + self.EVP_CipherFinal_ex = self._lib.EVP_CipherFinal_ex + self.EVP_CipherFinal_ex.restype = ctypes.c_int + self.EVP_CipherFinal_ex.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_DigestInit = self._lib.EVP_DigestInit + self.EVP_DigestInit.restype = ctypes.c_int + self._lib.EVP_DigestInit.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_DigestInit_ex = self._lib.EVP_DigestInit_ex + self.EVP_DigestInit_ex.restype = ctypes.c_int + self._lib.EVP_DigestInit_ex.argtypes = 3 * [ctypes.c_void_p] + + self.EVP_DigestUpdate = self._lib.EVP_DigestUpdate + self.EVP_DigestUpdate.restype = ctypes.c_int + self.EVP_DigestUpdate.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_int] + + self.EVP_DigestFinal = self._lib.EVP_DigestFinal + self.EVP_DigestFinal.restype = ctypes.c_int + self.EVP_DigestFinal.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_DigestFinal_ex = self._lib.EVP_DigestFinal_ex + self.EVP_DigestFinal_ex.restype = ctypes.c_int + self.EVP_DigestFinal_ex.argtypes = [ctypes.c_void_p, + ctypes.c_void_p, ctypes.c_void_p] + + self.ECDSA_sign = self._lib.ECDSA_sign + self.ECDSA_sign.restype = ctypes.c_int + 
self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] + + self.ECDSA_verify = self._lib.ECDSA_verify + self.ECDSA_verify.restype = ctypes.c_int + self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] + + if self._hexversion >= 0x10100000 and not self._libreSSL: + self.EVP_MD_CTX_new = self._lib.EVP_MD_CTX_new + self.EVP_MD_CTX_new.restype = ctypes.c_void_p + self.EVP_MD_CTX_new.argtypes = [] + + self.EVP_MD_CTX_reset = self._lib.EVP_MD_CTX_reset + self.EVP_MD_CTX_reset.restype = None + self.EVP_MD_CTX_reset.argtypes = [ctypes.c_void_p] + + self.EVP_MD_CTX_free = self._lib.EVP_MD_CTX_free + self.EVP_MD_CTX_free.restype = None + self.EVP_MD_CTX_free.argtypes = [ctypes.c_void_p] + + self.EVP_sha1 = self._lib.EVP_sha1 + self.EVP_sha1.restype = ctypes.c_void_p + self.EVP_sha1.argtypes = [] + + self.digest_ecdsa_sha1 = self.EVP_sha1 + else: + self.EVP_MD_CTX_create = self._lib.EVP_MD_CTX_create + self.EVP_MD_CTX_create.restype = ctypes.c_void_p + self.EVP_MD_CTX_create.argtypes = [] + + self.EVP_MD_CTX_init = self._lib.EVP_MD_CTX_init + self.EVP_MD_CTX_init.restype = None + self.EVP_MD_CTX_init.argtypes = [ctypes.c_void_p] + + self.EVP_MD_CTX_destroy = self._lib.EVP_MD_CTX_destroy + self.EVP_MD_CTX_destroy.restype = None + self.EVP_MD_CTX_destroy.argtypes = [ctypes.c_void_p] + + self.EVP_ecdsa = self._lib.EVP_ecdsa + self._lib.EVP_ecdsa.restype = ctypes.c_void_p + self._lib.EVP_ecdsa.argtypes = [] + + self.digest_ecdsa_sha1 = self.EVP_ecdsa + + self.RAND_bytes = self._lib.RAND_bytes + self.RAND_bytes.restype = ctypes.c_int + self.RAND_bytes.argtypes = [ctypes.c_void_p, ctypes.c_int] + + self.EVP_sha256 = self._lib.EVP_sha256 + self.EVP_sha256.restype = ctypes.c_void_p + self.EVP_sha256.argtypes = [] + + self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey + self.i2o_ECPublicKey.restype = ctypes.c_void_p + self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + + self.EVP_sha512 = self._lib.EVP_sha512 + self.EVP_sha512.restype = ctypes.c_void_p + self.EVP_sha512.argtypes = [] + + self.HMAC = self._lib.HMAC + self.HMAC.restype = ctypes.c_void_p + self.HMAC.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p] + + try: + self.PKCS5_PBKDF2_HMAC = self._lib.PKCS5_PBKDF2_HMAC + except: + # The above is not compatible with all versions of OSX. 
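+            # PKCS5_PBKDF2_HMAC entered OpenSSL in 1.0.0; the 0.9.8 line
+            # that older OS X releases shipped exports only the SHA-1
+            # variant, hence the PKCS5_PBKDF2_HMAC_SHA1 fallback below.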
+ self.PKCS5_PBKDF2_HMAC = self._lib.PKCS5_PBKDF2_HMAC_SHA1 + + self.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int + self.PKCS5_PBKDF2_HMAC.argtypes = [ctypes.c_void_p, ctypes.c_int, + ctypes.c_void_p, ctypes.c_int, + ctypes.c_int, ctypes.c_void_p, + ctypes.c_int, ctypes.c_void_p] + + self._set_ciphers() + self._set_curves() + + def _set_ciphers(self): + self.cipher_algo = { + 'aes-128-cbc': CipherName('aes-128-cbc', self.EVP_aes_128_cbc, 16), + 'aes-256-cbc': CipherName('aes-256-cbc', self.EVP_aes_256_cbc, 16), + 'aes-128-cfb': CipherName('aes-128-cfb', self.EVP_aes_128_cfb128, 16), + 'aes-256-cfb': CipherName('aes-256-cfb', self.EVP_aes_256_cfb128, 16), + 'aes-128-ofb': CipherName('aes-128-ofb', self._lib.EVP_aes_128_ofb, 16), + 'aes-256-ofb': CipherName('aes-256-ofb', self._lib.EVP_aes_256_ofb, 16), + #'aes-128-ctr': CipherName('aes-128-ctr', self._lib.EVP_aes_128_ctr, 16), + #'aes-256-ctr': CipherName('aes-256-ctr', self._lib.EVP_aes_256_ctr, 16), + 'bf-cfb': CipherName('bf-cfb', self.EVP_bf_cfb64, 8), + 'bf-cbc': CipherName('bf-cbc', self.EVP_bf_cbc, 8), + 'rc4': CipherName('rc4', self.EVP_rc4, 128), # 128 is the initialisation size not block size + } + + def _set_curves(self): + self.curves = { + 'secp112r1': 704, + 'secp112r2': 705, + 'secp128r1': 706, + 'secp128r2': 707, + 'secp160k1': 708, + 'secp160r1': 709, + 'secp160r2': 710, + 'secp192k1': 711, + 'secp224k1': 712, + 'secp224r1': 713, + 'secp256k1': 714, + 'secp384r1': 715, + 'secp521r1': 716, + 'sect113r1': 717, + 'sect113r2': 718, + 'sect131r1': 719, + 'sect131r2': 720, + 'sect163k1': 721, + 'sect163r1': 722, + 'sect163r2': 723, + 'sect193r1': 724, + 'sect193r2': 725, + 'sect233k1': 726, + 'sect233r1': 727, + 'sect239k1': 728, + 'sect283k1': 729, + 'sect283r1': 730, + 'sect409k1': 731, + 'sect409r1': 732, + 'sect571k1': 733, + 'sect571r1': 734, + } + + def BN_num_bytes(self, x): + """ + returns the length of a BN (OpenSSl API) + """ + return int((self.BN_num_bits(x) + 7) / 8) + + def get_cipher(self, name): + """ + returns the OpenSSL cipher instance + """ + if name not in self.cipher_algo: + raise Exception("Unknown cipher") + return self.cipher_algo[name] + + def get_curve(self, name): + """ + returns the id of a elliptic curve + """ + if name not in self.curves: + raise Exception("Unknown curve") + return self.curves[name] + + def get_curve_by_id(self, id): + """ + returns the name of a elliptic curve with his id + """ + res = None + for i in self.curves: + if self.curves[i] == id: + res = i + break + if res is None: + raise Exception("Unknown curve") + return res + + def rand(self, size): + """ + OpenSSL random function + """ + buffer = self.malloc(0, size) + # This pyelliptic library, by default, didn't check the return value of RAND_bytes. It is + # evidently possible that it returned an error and not-actually-random data. However, in + # tests on various operating systems, while generating hundreds of gigabytes of random + # strings of various sizes I could not get an error to occur. Also Bitcoin doesn't check + # the return value of RAND_bytes either. 
+ # Fixed in Bitmessage version 0.4.2 (in source code on 2013-10-13) + while self.RAND_bytes(buffer, size) != 1: + import time + time.sleep(1) + return buffer.raw + + def malloc(self, data, size): + """ + returns a create_string_buffer (ctypes) + """ + buffer = None + if data != 0: + if sys.version_info.major == 3 and isinstance(data, type('')): + data = data.encode() + buffer = self.create_string_buffer(data, size) + else: + buffer = self.create_string_buffer(size) + return buffer + +def loadOpenSSL(): + # os and ctypes.util are used below but not imported at module level, + # so pull them in here alongside logging + import logging + import os + import ctypes.util + global OpenSSL + try: + if sys.platform.startswith("win"): + dll_path = os.path.normpath(os.path.dirname(__file__) + "/../opensslVerify/" + "libeay32.dll") + elif sys.platform == "cygwin": + dll_path = "/bin/cygcrypto-1.0.0.dll" + elif os.path.isfile("../lib/libcrypto.so"): # ZeroBundle OSX + dll_path = "../lib/libcrypto.so" + else: + dll_path = "/usr/local/ssl/lib/libcrypto.so" + ssl = _OpenSSL(dll_path) + assert ssl + except Exception, err: + ssl = _OpenSSL(ctypes.util.find_library('ssl.so.1.0') or ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32') + OpenSSL = ssl + logging.debug("pyelliptic loaded: %s", ssl._lib) + +loadOpenSSL() diff --git a/src/lib/rsa/LICENSE b/src/lib/rsa/LICENSE new file mode 100644 index 00000000..da76c9d7 --- /dev/null +++ b/src/lib/rsa/LICENSE @@ -0,0 +1,13 @@ +Copyright 2011 Sybren A. Stüvel + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/src/lib/rsa/__init__.py b/src/lib/rsa/__init__.py new file mode 100644 index 00000000..c572c06b --- /dev/null +++ b/src/lib/rsa/__init__.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""RSA module + +Module for calculating large primes, and RSA encryption, decryption, signing +and verification. Includes generating public and private keys. + +WARNING: this implementation does not use random padding, compression of the +cleartext input to prevent repetitions, or other common security improvements. +Use with care.
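+
+A hedged usage sketch of the high-level API re-exported below (key size
+and plaintext are illustrative only):
+
+>>> import rsa
+>>> (pub, priv) = rsa.newkeys(512)
+>>> rsa.decrypt(rsa.encrypt('hello', pub), priv)
+'hello'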
+ +""" + +from rsa.key import newkeys, PrivateKey, PublicKey +from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \ + VerificationError + +__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly" +__date__ = "2016-03-29" +__version__ = '3.4.2' + +# Do doctest if we're run directly +if __name__ == "__main__": + import doctest + + doctest.testmod() + +__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey', + 'PrivateKey', 'DecryptionError', 'VerificationError'] diff --git a/src/lib/rsa/_compat.py b/src/lib/rsa/_compat.py new file mode 100644 index 00000000..93393d9f --- /dev/null +++ b/src/lib/rsa/_compat.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Python compatibility wrappers.""" + +from __future__ import absolute_import + +import sys +from struct import pack + +try: + MAX_INT = sys.maxsize +except AttributeError: + MAX_INT = sys.maxint + +MAX_INT64 = (1 << 63) - 1 +MAX_INT32 = (1 << 31) - 1 +MAX_INT16 = (1 << 15) - 1 + +# Determine the word size of the processor. +if MAX_INT == MAX_INT64: + # 64-bit processor. + MACHINE_WORD_SIZE = 64 +elif MAX_INT == MAX_INT32: + # 32-bit processor. + MACHINE_WORD_SIZE = 32 +else: + # Else we just assume 64-bit processor keeping up with modern times. + MACHINE_WORD_SIZE = 64 + +try: + # < Python3 + unicode_type = unicode +except NameError: + # Python3. + unicode_type = str + +# Fake byte literals. +if str is unicode_type: + def byte_literal(s): + return s.encode('latin1') +else: + def byte_literal(s): + return s + +# ``long`` is no more. Do type detection using this instead. +try: + integer_types = (int, long) +except NameError: + integer_types = (int,) + +b = byte_literal + +# To avoid calling b() multiple times in tight loops. +ZERO_BYTE = b('\x00') +EMPTY_BYTE = b('') + + +def is_bytes(obj): + """ + Determines whether the given value is a byte string. + + :param obj: + The value to test. + :returns: + ``True`` if ``value`` is a byte string; ``False`` otherwise. + """ + return isinstance(obj, bytes) + + +def is_integer(obj): + """ + Determines whether the given value is an integer. + + :param obj: + The value to test. + :returns: + ``True`` if ``value`` is an integer; ``False`` otherwise. + """ + return isinstance(obj, integer_types) + + +def byte(num): + """ + Converts a number between 0 and 255 (both inclusive) to a base-256 (byte) + representation. + + Use it as a replacement for ``chr`` where you are expecting a byte + because this will work on all current versions of Python:: + + :param num: + An unsigned integer between 0 and 255 (both inclusive). + :returns: + A single byte. + """ + return pack("B", num) + + +def get_word_alignment(num, force_arch=64, + _machine_word_size=MACHINE_WORD_SIZE): + """ + Returns alignment details for the given number based on the platform + Python is running on. + + :param num: + Unsigned integral number. 
+ :param force_arch: + If you don't want to use 64-bit unsigned chunks, set this to + anything other than 64. 32-bit chunks will be preferred then. + Default 64 will be used when on a 64-bit machine. + :param _machine_word_size: + (Internal) The machine word size used for alignment. + :returns: + 4-tuple:: + + (word_bits, word_bytes, + max_uint, packing_format_type) + """ + max_uint64 = 0xffffffffffffffff + max_uint32 = 0xffffffff + max_uint16 = 0xffff + max_uint8 = 0xff + + if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32: + # 64-bit unsigned integer. + return 64, 8, max_uint64, "Q" + elif num > max_uint16: + # 32-bit unsigned integer + return 32, 4, max_uint32, "L" + elif num > max_uint8: + # 16-bit unsigned integer. + return 16, 2, max_uint16, "H" + else: + # 8-bit unsigned integer. + return 8, 1, max_uint8, "B" diff --git a/src/lib/rsa/_version133.py b/src/lib/rsa/_version133.py new file mode 100644 index 00000000..ff03b45f --- /dev/null +++ b/src/lib/rsa/_version133.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Deprecated version of the RSA module + +.. deprecated:: 2.0 + + This submodule is deprecated and will be completely removed as of version 4.0. + +Module for calculating large primes, and RSA encryption, decryption, +signing and verification. Includes generating public and private keys. + +WARNING: this code implements the mathematics of RSA. It is not suitable for +real-world secure cryptography purposes. It has not been reviewed by a security +expert. It does not include padding of data. There are many ways in which the +output of this module, when used without any modification, can be sucessfully +attacked. +""" + +__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer" +__date__ = "2010-02-05" +__version__ = '1.3.3' + +# NOTE: Python's modulo can return negative numbers. We compensate for +# this behaviour using the abs() function + +try: + import cPickle as pickle +except ImportError: + import pickle +from pickle import dumps, loads +import base64 +import math +import os +import random +import sys +import types +import zlib + +from rsa._compat import byte + +# Display a warning that this insecure version is imported. 
+import warnings +warnings.warn('Insecure version of the RSA module is imported as %s, be careful' + % __name__) +warnings.warn('This submodule is deprecated and will be completely removed as of version 4.0.', + DeprecationWarning) + + +def gcd(p, q): + """Returns the greatest common divisor of p and q + + >>> gcd(42, 6) + 6 + """ + if p < q: return gcd(q, p) + if q == 0: return p + return gcd(q, abs(p % q)) + +def bytes2int(bytes): + """Converts a list of bytes or a string to an integer + """ + if not (type(bytes) is types.ListType or type(bytes) is types.StringType): + raise TypeError("You must pass a string or a list") + + # Convert byte stream to integer + integer = 0 + for byte in bytes: + integer *= 256 + if type(byte) is types.StringType: byte = ord(byte) + integer += byte + + return integer + +def int2bytes(number): + """Converts a number to a string of bytes + """ + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (byte(number & 0xFF), string) + number /= 256 + + return string + +def fast_exponentiation(a, p, n): + """Calculates r = a^p mod n + """ + result = a % n + remainders = [] + while p != 1: + remainders.append(p & 1) + p = p >> 1 + while remainders: + rem = remainders.pop() + result = ((a ** rem) * result ** 2) % n + return result + +def read_random_int(nbits): + """Reads a random integer of approximately nbits bits rounded up + to whole bytes""" + + nbytes = ceil(nbits/8.) + randomdata = os.urandom(nbytes) + return bytes2int(randomdata) + +def ceil(x): + """ceil(x) -> int(math.ceil(x))""" + + return int(math.ceil(x)) + +def randint(minvalue, maxvalue): + """Returns a random integer x with minvalue <= x <= maxvalue""" + + # Safety - get a lot of random data even if the range is fairly + # small + min_nbits = 32 + + # The range of the random numbers we need to generate + range = maxvalue - minvalue + + # Which is this number of bytes + rangebytes = ceil(math.log(range, 2) / 8.) + + # Convert to bits, but make sure it's always at least min_nbits*2 + rangebits = max(rangebytes * 8, min_nbits * 2) + + # Take a random number of bits between min_nbits and rangebits + nbits = random.randint(min_nbits, rangebits) + + return (read_random_int(nbits) % range) + minvalue + +def fermat_little_theorem(p): + """Returns 1 if p may be prime, and something else if p definitely + is not prime""" + + a = randint(1, p-1) + return fast_exponentiation(a, p-1, p) + +def jacobi(a, b): + """Calculates the value of the Jacobi symbol (a/b) + """ + + if a % b == 0: + return 0 + result = 1 + while a > 1: + if a & 1: + if ((a-1)*(b-1) >> 2) & 1: + result = -result + b, a = a, b % a + else: + if ((b ** 2 - 1) >> 3) & 1: + result = -result + a = a >> 1 + return result + +def jacobi_witness(x, n): + """Returns False if n is an Euler pseudo-prime with base x, and + True otherwise. + """ + + j = jacobi(x, n) % n + f = fast_exponentiation(x, (n-1)/2, n) + + if j == f: return False + return True + +def randomized_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or + prime (which is incorrect with error probability 2**-k) + + Returns False if the number is composite, and True if it's + probably prime. + """ + + q = 0.5 # Property of the jacobi_witness function + + # t = int(math.ceil(k / math.log(1/q, 2))) + t = ceil(k / math.log(1/q, 2)) + for i in range(t+1): + x = randint(1, n-1) + if jacobi_witness(x, n): return False + + return True + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. + """ + + """ + if not fermat_little_theorem(number) == 1: + # Not prime, according to Fermat's little theorem + return False + """ + + if randomized_primality_testing(number, 5): + # Prime, according to Jacobi + return True + + # Not prime + return False + + +def getprime(nbits): + """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In + other words: nbits is rounded up to whole bytes.
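+
+    A hedged doctest-style check (getprime() is randomized, so only
+    structural properties are asserted):
+
+    >>> p = getprime(64)
+    >>> is_prime(p)
+    True
+    >>> p & 1  # candidates are forced odd before testing
+    1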
+ """ + + nbytes = int(math.ceil(nbits/8.)) + + while True: + integer = read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if is_prime(integer): break + + # Retry if not prime + + return integer + +def are_relatively_prime(a, b): + """Returns True if a and b are relatively prime, and False if they + are not. + """ + + d = gcd(a, b) + return (d == 1) + +def find_p_q(nbits): + """Returns a tuple of two different primes of nbits bits""" + + p = getprime(nbits) + while True: + q = getprime(nbits) + if not q == p: break + + return (p, q) + +def extended_euclid_gcd(a, b): + """Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb + """ + + if b == 0: + return (a, 1, 0) + + q = abs(a % b) + r = long(a / b) + (d, k, l) = extended_euclid_gcd(b, q) + + return (d, l, k - l*r) + +# Main function: calculate encryption and decryption keys +def calculate_keys(p, q, nbits): + """Calculates an encryption and a decryption key for p and q, and + returns them as a tuple (e, d)""" + + n = p * q + phi_n = (p-1) * (q-1) + + while True: + # Make sure e has enough bits so we ensure "wrapping" through + # modulo n + e = getprime(max(8, nbits/2)) + if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break + + (d, i, j) = extended_euclid_gcd(e, phi_n) + + if not d == 1: + raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) + + if not (e * i) % phi_n == 1: + raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) + + return (e, i) + + +def gen_keys(nbits): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + """ + + while True: + (p, q) = find_p_q(nbits) + (e, d) = calculate_keys(p, q, nbits) + + # For some reason, d is sometimes negative. We don't know how + # to fix it (yet), so we keep trying until everything is shiny + if d > 0: break + + return (p, q, e, d) + +def gen_pubpriv_keys(nbits): + """Generates public and private keys, and returns them as (pub, + priv). + + The public key consists of a dict {e: ..., , n: ....). The private + key consists of a dict {d: ...., p: ...., q: ....). 
+ """ + + (p, q, e, d) = gen_keys(nbits) + + return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo + n""" + + if type(message) is types.IntType: + return encrypt_int(long(message), ekey, n) + + if not type(message) is types.LongType: + raise TypeError("You must pass a long or an int") + + if message > 0 and \ + math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)): + raise OverflowError("The message is too long") + + return fast_exponentiation(message, ekey, n) + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working + modulo n""" + + return encrypt_int(cyphertext, dkey, n) + +def sign_int(message, dkey, n): + """Signs 'message' using key 'dkey', working modulo n""" + + return decrypt_int(message, dkey, n) + +def verify_int(signed, ekey, n): + """verifies 'signed' using key 'ekey', working modulo n""" + + return encrypt_int(signed, ekey, n) + +def picklechops(chops): + """Pickles and base64encodes it's argument chops""" + + value = zlib.compress(dumps(chops)) + encoded = base64.encodestring(value) + return encoded.strip() + +def unpicklechops(string): + """base64decodes and unpickes it's argument string into chops""" + + return loads(zlib.decompress(base64.decodestring(string))) + +def chopstring(message, key, n, funcref): + """Splits 'message' into chops that are at most as long as n, + converts these into integers, and calls funcref(integer, key, n) + for each chop. + + Used by 'encrypt' and 'sign'. + """ + + msglen = len(message) + mbits = msglen * 8 + nbits = int(math.floor(math.log(n, 2))) + nbytes = nbits / 8 + blocks = msglen / nbytes + + if msglen % nbytes > 0: + blocks += 1 + + cypher = [] + + for bindex in range(blocks): + offset = bindex * nbytes + block = message[offset:offset+nbytes] + value = bytes2int(block) + cypher.append(funcref(value, key, n)) + + return picklechops(cypher) + +def gluechops(chops, key, n, funcref): + """Glues chops back together into a string. calls + funcref(integer, key, n) for each chop. + + Used by 'decrypt' and 'verify'. + """ + message = "" + + chops = unpicklechops(chops) + + for cpart in chops: + mpart = funcref(cpart, key, n) + message += int2bytes(mpart) + + return message + +def encrypt(message, key): + """Encrypts a string 'message' with the public key 'key'""" + + return chopstring(message, key['e'], key['n'], encrypt_int) + +def sign(message, key): + """Signs a string 'message' with the private key 'key'""" + + return chopstring(message, key['d'], key['p']*key['q'], decrypt_int) + +def decrypt(cypher, key): + """Decrypts a cypher with the private key 'key'""" + + return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) + +def verify(cypher, key): + """Verifies a cypher with the public key 'key'""" + + return gluechops(cypher, key['e'], key['n'], encrypt_int) + +# Do doctest if we're not imported +if __name__ == "__main__": + import doctest + doctest.testmod() + +__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"] + diff --git a/src/lib/rsa/_version200.py b/src/lib/rsa/_version200.py new file mode 100644 index 00000000..1a169494 --- /dev/null +++ b/src/lib/rsa/_version200.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Deprecated version of the RSA module + +.. deprecated:: 3.0 + + This submodule is deprecated and will be completely removed as of version 4.0. + +""" + +__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead" +__date__ = "2010-02-08" +__version__ = '2.0' + +import math +import os +import random +import sys +import types +from rsa._compat import byte + +# Display a warning that this insecure version is imported. +import warnings +warnings.warn('Insecure version of the RSA module is imported as %s' % __name__) +warnings.warn('This submodule is deprecated and will be completely removed as of version 4.0.', + DeprecationWarning) + + +def bit_size(number): + """Returns the number of bits required to hold a specific long number""" + + return int(math.ceil(math.log(number,2))) + +def gcd(p, q): + """Returns the greatest common divisor of p and q + >>> gcd(48, 180) + 12 + """ + # Iterateive Version is faster and uses much less stack space + while q != 0: + if p < q: (p,q) = (q,p) + (p,q) = (q, p % q) + return p + + +def bytes2int(bytes): + r"""Converts a list of bytes or a string to an integer + """ + + if not (type(bytes) is types.ListType or type(bytes) is types.StringType): + raise TypeError("You must pass a string or a list") + + # Convert byte stream to integer + integer = 0 + for byte in bytes: + integer *= 256 + if type(byte) is types.StringType: byte = ord(byte) + integer += byte + + return integer + +def int2bytes(number): + """ + Converts a number to a string of bytes + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (byte(number & 0xFF), string) + number /= 256 + + return string + +def to64(number): + """Converts a number in the range of 0 to 63 into base 64 digit + character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'. + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + if 0 <= number <= 9: #00-09 translates to '0' - '9' + return byte(number + 48) + + if 10 <= number <= 35: + return byte(number + 55) #10-35 translates to 'A' - 'Z' + + if 36 <= number <= 61: + return byte(number + 61) #36-61 translates to 'a' - 'z' + + if number == 62: # 62 translates to '-' (minus) + return byte(45) + + if number == 63: # 63 translates to '_' (underscore) + return byte(95) + + raise ValueError('Invalid Base64 value: %i' % number) + + +def from64(number): + """Converts an ordinal character value in the range of + 0-9,A-Z,a-z,-,_ to a number in the range of 0-63. 
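+
+    A hedged doctest-style check that to64() and from64() are inverses over
+    the full digit range:
+
+    >>> all(from64(ord(to64(n))) == n for n in range(64))
+    True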
+ """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + if 48 <= number <= 57: #ord('0') - ord('9') translates to 0-9 + return(number - 48) + + if 65 <= number <= 90: #ord('A') - ord('Z') translates to 10-35 + return(number - 55) + + if 97 <= number <= 122: #ord('a') - ord('z') translates to 36-61 + return(number - 61) + + if number == 45: #ord('-') translates to 62 + return(62) + + if number == 95: #ord('_') translates to 63 + return(63) + + raise ValueError('Invalid Base64 value: %i' % number) + + +def int2str64(number): + """Converts a number to a string of base64 encoded characters in + the range of '0'-'9','A'-'Z,'a'-'z','-','_'. + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (to64(number & 0x3F), string) + number /= 64 + + return string + + +def str642int(string): + """Converts a base64 encoded string into an integer. + The chars of this string in in the range '0'-'9','A'-'Z','a'-'z','-','_' + """ + + if not (type(string) is types.ListType or type(string) is types.StringType): + raise TypeError("You must pass a string or a list") + + integer = 0 + for byte in string: + integer *= 64 + if type(byte) is types.StringType: byte = ord(byte) + integer += from64(byte) + + return integer + +def read_random_int(nbits): + """Reads a random integer of approximately nbits bits rounded up + to whole bytes""" + + nbytes = int(math.ceil(nbits/8.)) + randomdata = os.urandom(nbytes) + return bytes2int(randomdata) + +def randint(minvalue, maxvalue): + """Returns a random integer x with minvalue <= x <= maxvalue""" + + # Safety - get a lot of random data even if the range is fairly + # small + min_nbits = 32 + + # The range of the random numbers we need to generate + range = (maxvalue - minvalue) + 1 + + # Which is this number of bytes + rangebytes = ((bit_size(range) + 7) / 8) + + # Convert to bits, but make sure it's always at least min_nbits*2 + rangebits = max(rangebytes * 8, min_nbits * 2) + + # Take a random number of bits between min_nbits and rangebits + nbits = random.randint(min_nbits, rangebits) + + return (read_random_int(nbits) % range) + minvalue + +def jacobi(a, b): + """Calculates the value of the Jacobi symbol (a/b) + where both a and b are positive integers, and b is odd + """ + + if a == 0: return 0 + result = 1 + while a > 1: + if a & 1: + if ((a-1)*(b-1) >> 2) & 1: + result = -result + a, b = b % a, a + else: + if (((b * b) - 1) >> 3) & 1: + result = -result + a >>= 1 + if a == 0: return 0 + return result + +def jacobi_witness(x, n): + """Returns False if n is an Euler pseudo-prime with base x, and + True otherwise. + """ + + j = jacobi(x, n) % n + f = pow(x, (n-1)/2, n) + + if j == f: return False + return True + +def randomized_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or + prime (which is incorrect with error probability 2**-k) + + Returns False if the number is composite, and True if it's + probably prime. + """ + + # 50% of Jacobi-witnesses can report compositness of non-prime numbers + + for i in range(k): + x = randint(1, n-1) + if jacobi_witness(x, n): return False + + return True + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. 
+ """ + + if randomized_primality_testing(number, 6): + # Prime, according to Jacobi + return True + + # Not prime + return False + + +def getprime(nbits): + """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In + other words: nbits is rounded up to whole bytes. + """ + + while True: + integer = read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if is_prime(integer): break + + # Retry if not prime + + return integer + +def are_relatively_prime(a, b): + """Returns True if a and b are relatively prime, and False if they + are not. + + >>> are_relatively_prime(2, 3) + 1 + >>> are_relatively_prime(2, 4) + 0 + """ + + d = gcd(a, b) + return (d == 1) + +def find_p_q(nbits): + """Returns a tuple of two different primes of nbits bits""" + pbits = nbits + (nbits/16) #Make sure that p and q aren't too close + qbits = nbits - (nbits/16) #or the factoring programs can factor n + p = getprime(pbits) + while True: + q = getprime(qbits) + #Make sure p and q are different. + if not q == p: break + return (p, q) + +def extended_gcd(a, b): + """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb + """ + # r = gcd(a,b) i = multiplicitive inverse of a mod b + # or j = multiplicitive inverse of b mod a + # Neg return values for i or j are made positive mod b or a respectively + # Iterateive Version is faster and uses much less stack space + x = 0 + y = 1 + lx = 1 + ly = 0 + oa = a #Remember original a/b to remove + ob = b #negative values from return results + while b != 0: + q = long(a/b) + (a, b) = (b, a % b) + (x, lx) = ((lx - (q * x)),x) + (y, ly) = ((ly - (q * y)),y) + if (lx < 0): lx += ob #If neg wrap modulo orignal b + if (ly < 0): ly += oa #If neg wrap modulo orignal a + return (a, lx, ly) #Return only positive values + +# Main function: calculate encryption and decryption keys +def calculate_keys(p, q, nbits): + """Calculates an encryption and a decryption key for p and q, and + returns them as a tuple (e, d)""" + + n = p * q + phi_n = (p-1) * (q-1) + + while True: + # Make sure e has enough bits so we ensure "wrapping" through + # modulo n + e = max(65537,getprime(nbits/4)) + if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break + + (d, i, j) = extended_gcd(e, phi_n) + + if not d == 1: + raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) + if (i < 0): + raise Exception("New extended_gcd shouldn't return negative values") + if not (e * i) % phi_n == 1: + raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) + + return (e, i) + + +def gen_keys(nbits): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + """ + + (p, q) = find_p_q(nbits) + (e, d) = calculate_keys(p, q, nbits) + + return (p, q, e, d) + +def newkeys(nbits): + """Generates public and private keys, and returns them as (pub, + priv). + + The public key consists of a dict {e: ..., , n: ....). The private + key consists of a dict {d: ...., p: ...., q: ....). 
+ """ + nbits = max(9,nbits) # Don't let nbits go below 9 bits + (p, q, e, d) = gen_keys(nbits) + + return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo n""" + + if type(message) is types.IntType: + message = long(message) + + if not type(message) is types.LongType: + raise TypeError("You must pass a long or int") + + if message < 0 or message > n: + raise OverflowError("The message is too long") + + #Note: Bit exponents start at zero (bit counts start at 1) this is correct + safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) + message += (1 << safebit) #add safebit to ensure folding + + return pow(message, ekey, n) + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working + modulo n""" + + message = pow(cyphertext, dkey, n) + + safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) + message -= (1 << safebit) #remove safebit before decode + + return message + +def encode64chops(chops): + """base64encodes chops and combines them into a ',' delimited string""" + + chips = [] #chips are character chops + + for value in chops: + chips.append(int2str64(value)) + + #delimit chops with comma + encoded = ','.join(chips) + + return encoded + +def decode64chops(string): + """base64decodes and makes a ',' delimited string into chops""" + + chips = string.split(',') #split chops at commas + + chops = [] + + for string in chips: #make char chops (chips) into chops + chops.append(str642int(string)) + + return chops + +def chopstring(message, key, n, funcref): + """Chops the 'message' into integers that fit into n, + leaving room for a safebit to be added to ensure that all + messages fold during exponentiation. The MSB of the number n + is not independant modulo n (setting it could cause overflow), so + use the next lower bit for the safebit. Therefore reserve 2-bits + in the number n for non-data bits. Calls specified encryption + function for each chop. + + Used by 'encrypt' and 'sign'. + """ + + msglen = len(message) + mbits = msglen * 8 + #Set aside 2-bits so setting of safebit won't overflow modulo n. + nbits = bit_size(n) - 2 # leave room for safebit + nbytes = nbits / 8 + blocks = msglen / nbytes + + if msglen % nbytes > 0: + blocks += 1 + + cypher = [] + + for bindex in range(blocks): + offset = bindex * nbytes + block = message[offset:offset+nbytes] + value = bytes2int(block) + cypher.append(funcref(value, key, n)) + + return encode64chops(cypher) #Encode encrypted ints to base64 strings + +def gluechops(string, key, n, funcref): + """Glues chops back together into a string. calls + funcref(integer, key, n) for each chop. + + Used by 'decrypt' and 'verify'. 
+ """ + message = "" + + chops = decode64chops(string) #Decode base64 strings into integer chops + + for cpart in chops: + mpart = funcref(cpart, key, n) #Decrypt each chop + message += int2bytes(mpart) #Combine decrypted strings into a msg + + return message + +def encrypt(message, key): + """Encrypts a string 'message' with the public key 'key'""" + if 'n' not in key: + raise Exception("You must use the public key with encrypt") + + return chopstring(message, key['e'], key['n'], encrypt_int) + +def sign(message, key): + """Signs a string 'message' with the private key 'key'""" + if 'p' not in key: + raise Exception("You must use the private key with sign") + + return chopstring(message, key['d'], key['p']*key['q'], encrypt_int) + +def decrypt(cypher, key): + """Decrypts a string 'cypher' with the private key 'key'""" + if 'p' not in key: + raise Exception("You must use the private key with decrypt") + + return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) + +def verify(cypher, key): + """Verifies a string 'cypher' with the public key 'key'""" + if 'n' not in key: + raise Exception("You must use the public key with verify") + + return gluechops(cypher, key['e'], key['n'], decrypt_int) + +# Do doctest if we're not imported +if __name__ == "__main__": + import doctest + doctest.testmod() + +__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"] + diff --git a/src/lib/rsa/asn1.py b/src/lib/rsa/asn1.py new file mode 100644 index 00000000..b724b8f5 --- /dev/null +++ b/src/lib/rsa/asn1.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""ASN.1 definitions. + +Not all ASN.1-handling code use these definitions, but when it does, they should be here. +""" + +from pyasn1.type import univ, namedtype, tag + + +class PubKeyHeader(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('oid', univ.ObjectIdentifier()), + namedtype.NamedType('parameters', univ.Null()), + ) + + +class OpenSSLPubKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('header', PubKeyHeader()), + + # This little hack (the implicit tag) allows us to get a Bit String as Octet String + namedtype.NamedType('key', univ.OctetString().subtype( + implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))), + ) + + +class AsnPubKey(univ.Sequence): + """ASN.1 contents of DER encoded public key: + + RSAPublicKey ::= SEQUENCE { + modulus INTEGER, -- n + publicExponent INTEGER, -- e + """ + + componentType = namedtype.NamedTypes( + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + ) diff --git a/src/lib/rsa/bigfile.py b/src/lib/rsa/bigfile.py new file mode 100644 index 00000000..3a09716a --- /dev/null +++ b/src/lib/rsa/bigfile.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. 
Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Large file support + +.. deprecated:: 3.4 + + The VARBLOCK format is NOT recommended for general use, has been deprecated since + Python-RSA 3.4, and will be removed in a future release. It's vulnerable to a + number of attacks: + + 1. decrypt/encrypt_bigfile() does not implement `Authenticated encryption`_ nor + uses MACs to verify messages before decrypting public key encrypted messages. + + 2. decrypt/encrypt_bigfile() does not use hybrid encryption (it uses plain RSA) + and has no method for chaining, so block reordering is possible. + + See `issue #19 on Github`_ for more information. + +.. _Authenticated encryption: https://en.wikipedia.org/wiki/Authenticated_encryption +.. _issue #19 on Github: https://github.com/sybrenstuvel/python-rsa/issues/13 + + +This module contains functions to: + + - break a file into smaller blocks, and encrypt them, and store the + encrypted blocks in another file. + + - take such an encrypted files, decrypt its blocks, and reconstruct the + original file. + +The encrypted file format is as follows, where || denotes byte concatenation: + + FILE := VERSION || BLOCK || BLOCK ... + + BLOCK := LENGTH || DATA + + LENGTH := varint-encoded length of the subsequent data. Varint comes from + Google Protobuf, and encodes an integer into a variable number of bytes. + Each byte uses the 7 lowest bits to encode the value. The highest bit set + to 1 indicates the next byte is also part of the varint. The last byte will + have this bit set to 0. + +This file format is called the VARBLOCK format, in line with the varint format +used to denote the block sizes. + +""" + +import warnings + +from rsa import key, common, pkcs1, varblock +from rsa._compat import byte + + +def encrypt_bigfile(infile, outfile, pub_key): + """Encrypts a file, writing it to 'outfile' in VARBLOCK format. + + .. deprecated:: 3.4 + This function was deprecated in Python-RSA version 3.4 due to security issues + in the VARBLOCK format. See the documentation_ for more information. + + .. _documentation: https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files + + :param infile: file-like object to read the cleartext from + :param outfile: file-like object to write the crypto in VARBLOCK format to + :param pub_key: :py:class:`rsa.PublicKey` to encrypt with + + """ + + warnings.warn("The 'rsa.bigfile.encrypt_bigfile' function was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. 
See " + "https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files " + "for more information.", + DeprecationWarning, stacklevel=2) + + if not isinstance(pub_key, key.PublicKey): + raise TypeError('Public key required, but got %r' % pub_key) + + key_bytes = common.bit_size(pub_key.n) // 8 + blocksize = key_bytes - 11 # keep space for PKCS#1 padding + + # Write the version number to the VARBLOCK file + outfile.write(byte(varblock.VARBLOCK_VERSION)) + + # Encrypt and write each block + for block in varblock.yield_fixedblocks(infile, blocksize): + crypto = pkcs1.encrypt(block, pub_key) + + varblock.write_varint(outfile, len(crypto)) + outfile.write(crypto) + + +def decrypt_bigfile(infile, outfile, priv_key): + """Decrypts an encrypted VARBLOCK file, writing it to 'outfile' + + .. deprecated:: 3.4 + This function was deprecated in Python-RSA version 3.4 due to security issues + in the VARBLOCK format. See the documentation_ for more information. + + .. _documentation: https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files + + :param infile: file-like object to read the crypto in VARBLOCK format from + :param outfile: file-like object to write the cleartext to + :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with + + """ + + warnings.warn("The 'rsa.bigfile.decrypt_bigfile' function was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. See " + "https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files " + "for more information.", + DeprecationWarning, stacklevel=2) + + if not isinstance(priv_key, key.PrivateKey): + raise TypeError('Private key required, but got %r' % priv_key) + + for block in varblock.yield_varblocks(infile): + cleartext = pkcs1.decrypt(block, priv_key) + outfile.write(cleartext) + + +__all__ = ['encrypt_bigfile', 'decrypt_bigfile'] diff --git a/src/lib/rsa/cli.py b/src/lib/rsa/cli.py new file mode 100644 index 00000000..3a218782 --- /dev/null +++ b/src/lib/rsa/cli.py @@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Commandline scripts. + +These scripts are called by the executables defined in setup.py. +""" + +from __future__ import with_statement, print_function + +import abc +import sys +from optparse import OptionParser + +import rsa +import rsa.bigfile +import rsa.pkcs1 + +HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys()) + + +def keygen(): + """Key generator.""" + + # Parse the CLI options + parser = OptionParser(usage='usage: %prog [options] keysize', + description='Generates a new RSA keypair of "keysize" bits.') + + parser.add_option('--pubout', type='string', + help='Output filename for the public key. The public key is ' + 'not saved if this option is not present. You can use ' + 'pyrsa-priv2pub to create the public key file later.') + + parser.add_option('-o', '--out', type='string', + help='Output filename for the private key. 
The key is ' + 'written to stdout if this option is not present.') + + parser.add_option('--form', + help='key format of the private and public keys - default PEM', + choices=('PEM', 'DER'), default='PEM') + + (cli, cli_args) = parser.parse_args(sys.argv[1:]) + + if len(cli_args) != 1: + parser.print_help() + raise SystemExit(1) + + try: + keysize = int(cli_args[0]) + except ValueError: + parser.print_help() + print('Not a valid number: %s' % cli_args[0], file=sys.stderr) + raise SystemExit(1) + + print('Generating %i-bit key' % keysize, file=sys.stderr) + (pub_key, priv_key) = rsa.newkeys(keysize) + + # Save public key + if cli.pubout: + print('Writing public key to %s' % cli.pubout, file=sys.stderr) + data = pub_key.save_pkcs1(format=cli.form) + with open(cli.pubout, 'wb') as outfile: + outfile.write(data) + + # Save private key + data = priv_key.save_pkcs1(format=cli.form) + + if cli.out: + print('Writing private key to %s' % cli.out, file=sys.stderr) + with open(cli.out, 'wb') as outfile: + outfile.write(data) + else: + print('Writing private key to stdout', file=sys.stderr) + sys.stdout.write(data) + + +class CryptoOperation(object): + """CLI callable that operates with input, output, and a key.""" + + __metaclass__ = abc.ABCMeta + + keyname = 'public' # or 'private' + usage = 'usage: %%prog [options] %(keyname)s_key' + description = None + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \ + 'not specified.' + output_help = 'Name of the file to write the %(operation_past)s file ' \ + 'to. Written to stdout if this option is not present.' + expected_cli_args = 1 + has_output = True + + key_class = rsa.PublicKey + + def __init__(self): + self.usage = self.usage % self.__class__.__dict__ + self.input_help = self.input_help % self.__class__.__dict__ + self.output_help = self.output_help % self.__class__.__dict__ + + @abc.abstractmethod + def perform_operation(self, indata, key, cli_args=None): + """Performs the program's operation. + + Implement in a subclass. + + :returns: the data to write to the output. 
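`CryptoOperation` above is a small template-method class: subclasses override class attributes (`keyname`, `operation`, and so on), and `__init__` interpolates them into the shared usage/help strings through `self.__class__.__dict__`. A standalone miniature of the pattern, with illustrative names:

```python
# Miniature of CryptoOperation's class-attribute interpolation pattern.
class Operation:
    keyname = 'public'
    operation = 'transform'
    usage = 'usage: %%prog [options] %(keyname)s_key'
    input_help = 'Name of the file to %(operation)s.'

    def __init__(self):
        # %(...)s placeholders are resolved against the subclass's dict
        self.usage = self.usage % self.__class__.__dict__
        self.input_help = self.input_help % self.__class__.__dict__

class Decrypt(Operation):
    keyname = 'private'
    operation = 'decrypt'

op = Decrypt()
assert op.usage == 'usage: %prog [options] private_key'
assert op.input_help == 'Name of the file to decrypt.'
```

One consequence of resolving against `__class__.__dict__` rather than the instance is that a subclass must redefine every attribute its format strings mention; attributes merely inherited from the base class are not in that dict.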
+ """ + + def __call__(self): + """Runs the program.""" + + (cli, cli_args) = self.parse_cli() + + key = self.read_key(cli_args[0], cli.keyform) + + indata = self.read_infile(cli.input) + + print(self.operation_progressive.title(), file=sys.stderr) + outdata = self.perform_operation(indata, key, cli_args) + + if self.has_output: + self.write_outfile(outdata, cli.output) + + def parse_cli(self): + """Parse the CLI options + + :returns: (cli_opts, cli_args) + """ + + parser = OptionParser(usage=self.usage, description=self.description) + + parser.add_option('-i', '--input', type='string', help=self.input_help) + + if self.has_output: + parser.add_option('-o', '--output', type='string', help=self.output_help) + + parser.add_option('--keyform', + help='Key format of the %s key - default PEM' % self.keyname, + choices=('PEM', 'DER'), default='PEM') + + (cli, cli_args) = parser.parse_args(sys.argv[1:]) + + if len(cli_args) != self.expected_cli_args: + parser.print_help() + raise SystemExit(1) + + return cli, cli_args + + def read_key(self, filename, keyform): + """Reads a public or private key.""" + + print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr) + with open(filename, 'rb') as keyfile: + keydata = keyfile.read() + + return self.key_class.load_pkcs1(keydata, keyform) + + def read_infile(self, inname): + """Read the input file""" + + if inname: + print('Reading input from %s' % inname, file=sys.stderr) + with open(inname, 'rb') as infile: + return infile.read() + + print('Reading input from stdin', file=sys.stderr) + return sys.stdin.read() + + def write_outfile(self, outdata, outname): + """Write the output file""" + + if outname: + print('Writing output to %s' % outname, file=sys.stderr) + with open(outname, 'wb') as outfile: + outfile.write(outdata) + else: + print('Writing output to stdout', file=sys.stderr) + sys.stdout.write(outdata) + + +class EncryptOperation(CryptoOperation): + """Encrypts a file.""" + + keyname = 'public' + description = ('Encrypts a file. The file must be shorter than the key ' + 'length in order to be encrypted. For larger files, use the ' + 'pyrsa-encrypt-bigfile command.') + operation = 'encrypt' + operation_past = 'encrypted' + operation_progressive = 'encrypting' + + def perform_operation(self, indata, pub_key, cli_args=None): + """Encrypts files.""" + + return rsa.encrypt(indata, pub_key) + + +class DecryptOperation(CryptoOperation): + """Decrypts a file.""" + + keyname = 'private' + description = ('Decrypts a file. The original file must be shorter than ' + 'the key length in order to have been encrypted. For larger ' + 'files, use the pyrsa-decrypt-bigfile command.') + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + key_class = rsa.PrivateKey + + def perform_operation(self, indata, priv_key, cli_args=None): + """Decrypts files.""" + + return rsa.decrypt(indata, priv_key) + + +class SignOperation(CryptoOperation): + """Signs a file.""" + + keyname = 'private' + usage = 'usage: %%prog [options] private_key hash_method' + description = ('Signs a file, outputs the signature. Choose the hash ' + 'method from %s' % ', '.join(HASH_METHODS)) + operation = 'sign' + operation_past = 'signature' + operation_progressive = 'Signing' + key_class = rsa.PrivateKey + expected_cli_args = 2 + + output_help = ('Name of the file to write the signature to. 
Written ' + 'to stdout if this option is not present.') + + def perform_operation(self, indata, priv_key, cli_args): + """Signs files.""" + + hash_method = cli_args[1] + if hash_method not in HASH_METHODS: + raise SystemExit('Invalid hash method, choose one of %s' % + ', '.join(HASH_METHODS)) + + return rsa.sign(indata, priv_key, hash_method) + + +class VerifyOperation(CryptoOperation): + """Verify a signature.""" + + keyname = 'public' + usage = 'usage: %%prog [options] public_key signature_file' + description = ('Verifies a signature, exits with status 0 upon success, ' + 'prints an error message and exits with status 1 upon error.') + operation = 'verify' + operation_past = 'verified' + operation_progressive = 'Verifying' + key_class = rsa.PublicKey + expected_cli_args = 2 + has_output = False + + def perform_operation(self, indata, pub_key, cli_args): + """Verifies files.""" + + signature_file = cli_args[1] + + with open(signature_file, 'rb') as sigfile: + signature = sigfile.read() + + try: + rsa.verify(indata, signature, pub_key) + except rsa.VerificationError: + raise SystemExit('Verification failed.') + + print('Verification OK', file=sys.stderr) + + +class BigfileOperation(CryptoOperation): + """CryptoOperation that doesn't read the entire file into memory.""" + + def __init__(self): + CryptoOperation.__init__(self) + + self.file_objects = [] + + def __del__(self): + """Closes any open file handles.""" + + for fobj in self.file_objects: + fobj.close() + + def __call__(self): + """Runs the program.""" + + (cli, cli_args) = self.parse_cli() + + key = self.read_key(cli_args[0], cli.keyform) + + # Get the file handles + infile = self.get_infile(cli.input) + outfile = self.get_outfile(cli.output) + + # Call the operation + print(self.operation_progressive.title(), file=sys.stderr) + self.perform_operation(infile, outfile, key, cli_args) + + def get_infile(self, inname): + """Returns the input file object""" + + if inname: + print('Reading input from %s' % inname, file=sys.stderr) + fobj = open(inname, 'rb') + self.file_objects.append(fobj) + else: + print('Reading input from stdin', file=sys.stderr) + fobj = sys.stdin + + return fobj + + def get_outfile(self, outname): + """Returns the output file object""" + + if outname: + print('Will write output to %s' % outname, file=sys.stderr) + fobj = open(outname, 'wb') + self.file_objects.append(fobj) + else: + print('Will write output to stdout', file=sys.stderr) + fobj = sys.stdout + + return fobj + + +class EncryptBigfileOperation(BigfileOperation): + """Encrypts a file to VARBLOCK format.""" + + keyname = 'public' + description = ('Encrypts a file to an encrypted VARBLOCK file. 
The file ' + 'can be larger than the key length, but the output file is only ' + 'compatible with Python-RSA.') + operation = 'encrypt' + operation_past = 'encrypted' + operation_progressive = 'encrypting' + + def perform_operation(self, infile, outfile, pub_key, cli_args=None): + """Encrypts files to VARBLOCK.""" + + return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key) + + +class DecryptBigfileOperation(BigfileOperation): + """Decrypts a file in VARBLOCK format.""" + + keyname = 'private' + description = ('Decrypts an encrypted VARBLOCK file that was encrypted ' + 'with pyrsa-encrypt-bigfile') + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + key_class = rsa.PrivateKey + + def perform_operation(self, infile, outfile, priv_key, cli_args=None): + """Decrypts a VARBLOCK file.""" + + return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key) + + +encrypt = EncryptOperation() +decrypt = DecryptOperation() +sign = SignOperation() +verify = VerifyOperation() +encrypt_bigfile = EncryptBigfileOperation() +decrypt_bigfile = DecryptBigfileOperation() diff --git a/src/lib/rsa/common.py b/src/lib/rsa/common.py new file mode 100644 index 00000000..e0743340 --- /dev/null +++ b/src/lib/rsa/common.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Common functionality shared by several modules.""" + + +def bit_size(num): + """ + Number of bits needed to represent a integer excluding any prefix + 0 bits. + + As per definition from https://wiki.python.org/moin/BitManipulation and + to match the behavior of the Python 3 API. + + Usage:: + + >>> bit_size(1023) + 10 + >>> bit_size(1024) + 11 + >>> bit_size(1025) + 11 + + :param num: + Integer value. If num is 0, returns 0. Only the absolute value of the + number is considered. Therefore, signed integers will be abs(num) + before the number's bit length is determined. + :returns: + Returns the number of bits in the integer. + """ + if num == 0: + return 0 + if num < 0: + num = -num + + # Make sure this is an int and not a float. + num & 1 + + hex_num = "%x" % num + return ((len(hex_num) - 1) * 4) + { + '0': 0, '1': 1, '2': 2, '3': 2, + '4': 3, '5': 3, '6': 3, '7': 3, + '8': 4, '9': 4, 'a': 4, 'b': 4, + 'c': 4, 'd': 4, 'e': 4, 'f': 4, + }[hex_num[0]] + + +def _bit_size(number): + """ + Returns the number of bits required to hold a specific long number. + """ + if number < 0: + raise ValueError('Only nonnegative numbers possible: %s' % number) + + if number == 0: + return 0 + + # This works, even with very large numbers. When using math.log(number, 2), + # you'll get rounding errors and it'll fail. + bits = 0 + while number: + bits += 1 + number >>= 1 + + return bits + + +def byte_size(number): + """ + Returns the number of bytes required to hold a specific long number. + + The number of bytes is rounded up. 
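`common.bit_size` above counts bits via the hex representation: four bits per hex digit after the first, plus a small lookup table for the leading digit. It is documented to match the Python 3 API, which a quick cross-check against `int.bit_length()` confirms:

```python
# Cross-check of the hex-digit bit counting above against int.bit_length().
_LEADING = {'0': 0, '1': 1, '2': 2, '3': 2, '4': 3, '5': 3, '6': 3, '7': 3,
            '8': 4, '9': 4, 'a': 4, 'b': 4, 'c': 4, 'd': 4, 'e': 4, 'f': 4}

def bit_size(num: int) -> int:
    if num == 0:
        return 0
    hex_num = '%x' % abs(num)
    return (len(hex_num) - 1) * 4 + _LEADING[hex_num[0]]

for value in (1, 7, 8, 1023, 1024, 1025, (1 << 200) - 1, 1 << 200):
    assert bit_size(value) == value.bit_length()
```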
+ + Usage:: + + >>> byte_size(1 << 1023) + 128 + >>> byte_size((1 << 1024) - 1) + 128 + >>> byte_size(1 << 1024) + 129 + + :param number: + An unsigned integer + :returns: + The number of bytes required to hold a specific long number. + """ + quanta, mod = divmod(bit_size(number), 8) + if mod or number == 0: + quanta += 1 + return quanta + # return int(math.ceil(bit_size(number) / 8.0)) + + +def extended_gcd(a, b): + """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb + """ + # r = gcd(a,b) i = multiplicitive inverse of a mod b + # or j = multiplicitive inverse of b mod a + # Neg return values for i or j are made positive mod b or a respectively + # Iterateive Version is faster and uses much less stack space + x = 0 + y = 1 + lx = 1 + ly = 0 + oa = a # Remember original a/b to remove + ob = b # negative values from return results + while b != 0: + q = a // b + (a, b) = (b, a % b) + (x, lx) = ((lx - (q * x)), x) + (y, ly) = ((ly - (q * y)), y) + if lx < 0: + lx += ob # If neg wrap modulo orignal b + if ly < 0: + ly += oa # If neg wrap modulo orignal a + return a, lx, ly # Return only positive values + + +def inverse(x, n): + """Returns x^-1 (mod n) + + >>> inverse(7, 4) + 3 + >>> (inverse(143, 4) * 143) % 4 + 1 + """ + + (divider, inv, _) = extended_gcd(x, n) + + if divider != 1: + raise ValueError("x (%d) and n (%d) are not relatively prime" % (x, n)) + + return inv + + +def crt(a_values, modulo_values): + """Chinese Remainder Theorem. + + Calculates x such that x = a[i] (mod m[i]) for each i. + + :param a_values: the a-values of the above equation + :param modulo_values: the m-values of the above equation + :returns: x such that x = a[i] (mod m[i]) for each i + + + >>> crt([2, 3], [3, 5]) + 8 + + >>> crt([2, 3, 2], [3, 5, 7]) + 23 + + >>> crt([2, 3, 0], [7, 11, 15]) + 135 + """ + + m = 1 + x = 0 + + for modulo in modulo_values: + m *= modulo + + for (m_i, a_i) in zip(modulo_values, a_values): + M_i = m // m_i + inv = inverse(M_i, m_i) + + x = (x + a_i * M_i * inv) % m + + return x + + +if __name__ == '__main__': + import doctest + + doctest.testmod() diff --git a/src/lib/rsa/core.py b/src/lib/rsa/core.py new file mode 100644 index 00000000..b3114d9e --- /dev/null +++ b/src/lib/rsa/core.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Core mathematical operations. + +This is the actual core RSA implementation, which is only defined +mathematically on integers. 
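The `crt` helper above combines residues with the standard Chinese Remainder construction, x = sum of a_i * M_i * (M_i^-1 mod m_i), reduced modulo the product of the moduli. Its first doctest, `crt([2, 3], [3, 5]) == 8`, spelled out term by term as a sketch (`pow(x, -1, m)` needs Python 3.8+):

```python
# crt([2, 3], [3, 5]) spelled out: find x with x % 3 == 2 and x % 5 == 3.
m = 3 * 5                        # product of the moduli

# term for m_i = 3, a_i = 2:  M_i = 15 // 3 = 5,  5**-1 mod 3 == 2
t1 = 2 * 5 * pow(5, -1, 3)       # == 20

# term for m_i = 5, a_i = 3:  M_i = 15 // 5 = 3,  3**-1 mod 5 == 2
t2 = 3 * 3 * pow(3, -1, 5)       # == 18

x = (t1 + t2) % m                # (20 + 18) % 15 == 8
assert x == 8 and x % 3 == 2 and x % 5 == 3
```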
+""" + +from rsa._compat import is_integer + + +def assert_int(var, name): + if is_integer(var): + return + + raise TypeError('%s should be an integer, not %s' % (name, var.__class__)) + + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo n""" + + assert_int(message, 'message') + assert_int(ekey, 'ekey') + assert_int(n, 'n') + + if message < 0: + raise ValueError('Only non-negative numbers are supported') + + if message > n: + raise OverflowError("The message %i is too long for n=%i" % (message, n)) + + return pow(message, ekey, n) + + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working modulo n""" + + assert_int(cyphertext, 'cyphertext') + assert_int(dkey, 'dkey') + assert_int(n, 'n') + + message = pow(cyphertext, dkey, n) + return message diff --git a/src/lib/rsa/key.py b/src/lib/rsa/key.py new file mode 100644 index 00000000..64600a27 --- /dev/null +++ b/src/lib/rsa/key.py @@ -0,0 +1,739 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""RSA key generation code. + +Create new keys with the newkeys() function. It will give you a PublicKey and a +PrivateKey object. + +Loading and saving keys requires the pyasn1 module. This module is imported as +late as possible, such that other functionality will remain working in absence +of pyasn1. + +.. note:: + + Storing public and private keys via the `pickle` module is possible. + However, it is insecure to load a key from an untrusted source. + The pickle module is not secure against erroneous or maliciously + constructed data. Never unpickle data received from an untrusted + or unauthenticated source. + +""" + +import logging +from rsa._compat import b + +import rsa.prime +import rsa.pem +import rsa.common +import rsa.randnum +import rsa.core + +log = logging.getLogger(__name__) +DEFAULT_EXPONENT = 65537 + + +class AbstractKey(object): + """Abstract superclass for private and public keys.""" + + __slots__ = ('n', 'e') + + def __init__(self, n, e): + self.n = n + self.e = e + + @classmethod + def load_pkcs1(cls, keyfile, format='PEM'): + """Loads a key in PKCS#1 DER or PEM format. + + :param keyfile: contents of a DER- or PEM-encoded file that contains + the public key. + :param format: the format of the file to load; 'PEM' or 'DER' + + :return: a PublicKey object + """ + + methods = { + 'PEM': cls._load_pkcs1_pem, + 'DER': cls._load_pkcs1_der, + } + + method = cls._assert_format_exists(format, methods) + return method(keyfile) + + @staticmethod + def _assert_format_exists(file_format, methods): + """Checks whether the given file format exists in 'methods'. + """ + + try: + return methods[file_format] + except KeyError: + formats = ', '.join(sorted(methods.keys())) + raise ValueError('Unsupported format: %r, try one of %s' % (file_format, + formats)) + + def save_pkcs1(self, format='PEM'): + """Saves the public key in PKCS#1 DER or PEM format. 
+ + :param format: the format to save; 'PEM' or 'DER' + :returns: the DER- or PEM-encoded public key. + """ + + methods = { + 'PEM': self._save_pkcs1_pem, + 'DER': self._save_pkcs1_der, + } + + method = self._assert_format_exists(format, methods) + return method() + + def blind(self, message, r): + """Performs blinding on the message using random number 'r'. + + :param message: the message, as integer, to blind. + :type message: int + :param r: the random number to blind with. + :type r: int + :return: the blinded message. + :rtype: int + + The blinding is such that message = unblind(decrypt(blind(encrypt(message))). + + See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29 + """ + + return (message * pow(r, self.e, self.n)) % self.n + + def unblind(self, blinded, r): + """Performs blinding on the message using random number 'r'. + + :param blinded: the blinded message, as integer, to unblind. + :param r: the random number to unblind with. + :return: the original message. + + The blinding is such that message = unblind(decrypt(blind(encrypt(message))). + + See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29 + """ + + return (rsa.common.inverse(r, self.n) * blinded) % self.n + + +class PublicKey(AbstractKey): + """Represents a public RSA key. + + This key is also known as the 'encryption key'. It contains the 'n' and 'e' + values. + + Supports attributes as well as dictionary-like access. Attribute accesss is + faster, though. + + >>> PublicKey(5, 3) + PublicKey(5, 3) + + >>> key = PublicKey(5, 3) + >>> key.n + 5 + >>> key['n'] + 5 + >>> key.e + 3 + >>> key['e'] + 3 + + """ + + __slots__ = ('n', 'e') + + def __getitem__(self, key): + return getattr(self, key) + + def __repr__(self): + return 'PublicKey(%i, %i)' % (self.n, self.e) + + def __getstate__(self): + """Returns the key as tuple for pickling.""" + return self.n, self.e + + def __setstate__(self, state): + """Sets the key from tuple.""" + self.n, self.e = state + + def __eq__(self, other): + if other is None: + return False + + if not isinstance(other, PublicKey): + return False + + return self.n == other.n and self.e == other.e + + def __ne__(self, other): + return not (self == other) + + @classmethod + def _load_pkcs1_der(cls, keyfile): + """Loads a key in PKCS#1 DER format. + + :param keyfile: contents of a DER-encoded file that contains the public + key. + :return: a PublicKey object + + First let's construct a DER encoded key: + + >>> import base64 + >>> b64der = 'MAwCBQCNGmYtAgMBAAE=' + >>> der = base64.standard_b64decode(b64der) + + This loads the file: + + >>> PublicKey._load_pkcs1_der(der) + PublicKey(2367317549, 65537) + + """ + + from pyasn1.codec.der import decoder + from rsa.asn1 import AsnPubKey + + (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey()) + return cls(n=int(priv['modulus']), e=int(priv['publicExponent'])) + + def _save_pkcs1_der(self): + """Saves the public key in PKCS#1 DER format. + + @returns: the DER-encoded public key. + """ + + from pyasn1.codec.der import encoder + from rsa.asn1 import AsnPubKey + + # Create the ASN object + asn_key = AsnPubKey() + asn_key.setComponentByName('modulus', self.n) + asn_key.setComponentByName('publicExponent', self.e) + + return encoder.encode(asn_key) + + @classmethod + def _load_pkcs1_pem(cls, keyfile): + """Loads a PKCS#1 PEM-encoded public key file. + + The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and + after the "-----END RSA PUBLIC KEY-----" lines is ignored. 
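`blind`/`unblind` above rely on the multiplicative structure of RSA: blinding a ciphertext by r^e makes the private-key exponentiation operate on m*r rather than m, and multiplying by r^-1 afterwards recovers m. That identity is what makes the side-channel countermeasure in `blinded_decrypt` (further down this file) work. Sketched with toy numbers (`pow(r, -1, n)` needs Python 3.8+):

```python
# Blinding identity sketch: (c * r**e)**d == m * r (mod n).
n, e, d = 3233, 17, 2753   # toy key, illustration only
m, r = 65, 7               # r must be invertible modulo n

c = pow(m, e, n)                            # ciphertext
blinded = (c * pow(r, e, n)) % n            # blind(): randomises the base
plain_r = pow(blinded, d, n)                # the side-channel-sensitive pow
recovered = (pow(r, -1, n) * plain_r) % n   # unblind()
assert recovered == m
```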
+ + :param keyfile: contents of a PEM-encoded file that contains the public + key. + :return: a PublicKey object + """ + + der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY') + return cls._load_pkcs1_der(der) + + def _save_pkcs1_pem(self): + """Saves a PKCS#1 PEM-encoded public key file. + + :return: contents of a PEM-encoded file that contains the public key. + """ + + der = self._save_pkcs1_der() + return rsa.pem.save_pem(der, 'RSA PUBLIC KEY') + + @classmethod + def load_pkcs1_openssl_pem(cls, keyfile): + """Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL. + + These files can be recognised in that they start with BEGIN PUBLIC KEY + rather than BEGIN RSA PUBLIC KEY. + + The contents of the file before the "-----BEGIN PUBLIC KEY-----" and + after the "-----END PUBLIC KEY-----" lines is ignored. + + :param keyfile: contents of a PEM-encoded file that contains the public + key, from OpenSSL. + :return: a PublicKey object + """ + + der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY') + return cls.load_pkcs1_openssl_der(der) + + @classmethod + def load_pkcs1_openssl_der(cls, keyfile): + """Loads a PKCS#1 DER-encoded public key file from OpenSSL. + + :param keyfile: contents of a DER-encoded file that contains the public + key, from OpenSSL. + :return: a PublicKey object + + """ + + from rsa.asn1 import OpenSSLPubKey + from pyasn1.codec.der import decoder + from pyasn1.type import univ + + (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey()) + + if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'): + raise TypeError("This is not a DER-encoded OpenSSL-compatible public key") + + return cls._load_pkcs1_der(keyinfo['key'][1:]) + + +class PrivateKey(AbstractKey): + """Represents a private RSA key. + + This key is also known as the 'decryption key'. It contains the 'n', 'e', + 'd', 'p', 'q' and other values. + + Supports attributes as well as dictionary-like access. Attribute accesss is + faster, though. + + >>> PrivateKey(3247, 65537, 833, 191, 17) + PrivateKey(3247, 65537, 833, 191, 17) + + exp1, exp2 and coef can be given, but if None or omitted they will be calculated: + + >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287, exp2=4) + >>> pk.exp1 + 55063 + >>> pk.exp2 # this is of course not a correct value, but it is the one we passed. 
+ 4 + >>> pk.coef + 50797 + + If you give exp1, exp2 or coef, they will be used as-is: + + >>> pk = PrivateKey(1, 2, 3, 4, 5, 6, 7, 8) + >>> pk.exp1 + 6 + >>> pk.exp2 + 7 + >>> pk.coef + 8 + + """ + + __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef') + + def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None): + AbstractKey.__init__(self, n, e) + self.d = d + self.p = p + self.q = q + + # Calculate the other values if they aren't supplied + if exp1 is None: + self.exp1 = int(d % (p - 1)) + else: + self.exp1 = exp1 + + if exp2 is None: + self.exp2 = int(d % (q - 1)) + else: + self.exp2 = exp2 + + if coef is None: + self.coef = rsa.common.inverse(q, p) + else: + self.coef = coef + + def __getitem__(self, key): + return getattr(self, key) + + def __repr__(self): + return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self + + def __getstate__(self): + """Returns the key as tuple for pickling.""" + return self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef + + def __setstate__(self, state): + """Sets the key from tuple.""" + self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef = state + + def __eq__(self, other): + if other is None: + return False + + if not isinstance(other, PrivateKey): + return False + + return (self.n == other.n and + self.e == other.e and + self.d == other.d and + self.p == other.p and + self.q == other.q and + self.exp1 == other.exp1 and + self.exp2 == other.exp2 and + self.coef == other.coef) + + def __ne__(self, other): + return not (self == other) + + def blinded_decrypt(self, encrypted): + """Decrypts the message using blinding to prevent side-channel attacks. + + :param encrypted: the encrypted message + :type encrypted: int + + :returns: the decrypted message + :rtype: int + """ + + blind_r = rsa.randnum.randint(self.n - 1) + blinded = self.blind(encrypted, blind_r) # blind before decrypting + decrypted = rsa.core.decrypt_int(blinded, self.d, self.n) + + return self.unblind(decrypted, blind_r) + + def blinded_encrypt(self, message): + """Encrypts the message using blinding to prevent side-channel attacks. + + :param message: the message to encrypt + :type message: int + + :returns: the encrypted message + :rtype: int + """ + + blind_r = rsa.randnum.randint(self.n - 1) + blinded = self.blind(message, blind_r) # blind before encrypting + encrypted = rsa.core.encrypt_int(blinded, self.d, self.n) + return self.unblind(encrypted, blind_r) + + @classmethod + def _load_pkcs1_der(cls, keyfile): + """Loads a key in PKCS#1 DER format. + + :param keyfile: contents of a DER-encoded file that contains the private + key. 
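When `exp1`, `exp2`, or `coef` are omitted, `PrivateKey.__init__` above derives them as d mod (p-1), d mod (q-1), and q^-1 mod p: the precomputed CRT fields from PKCS#1. A toy-sized sketch of what they are for, with Garner's recombination checked against plain `pow` (textbook numbers, not a real key):

```python
# The CRT fields PrivateKey derives above, for a textbook toy key.
p, q, e, d = 61, 53, 17, 2753
n = p * q

exp1 = d % (p - 1)       # PKCS#1 exponent1:  d mod (p-1)
exp2 = d % (q - 1)       # PKCS#1 exponent2:  d mod (q-1)
coef = pow(q, -1, p)     # PKCS#1 coefficient: (inverse of q) mod p

# Garner recombination: two small exponentiations instead of one big one.
c = pow(42, e, n)
m1, m2 = pow(c, exp1, p), pow(c, exp2, q)
h = (coef * (m1 - m2)) % p
assert m2 + h * q == pow(c, d, n) == 42
```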
+ :return: a PrivateKey object + + First let's construct a DER encoded key: + + >>> import base64 + >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt' + >>> der = base64.standard_b64decode(b64der) + + This loads the file: + + >>> PrivateKey._load_pkcs1_der(der) + PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + + """ + + from pyasn1.codec.der import decoder + (priv, _) = decoder.decode(keyfile) + + # ASN.1 contents of DER encoded private key: + # + # RSAPrivateKey ::= SEQUENCE { + # version Version, + # modulus INTEGER, -- n + # publicExponent INTEGER, -- e + # privateExponent INTEGER, -- d + # prime1 INTEGER, -- p + # prime2 INTEGER, -- q + # exponent1 INTEGER, -- d mod (p-1) + # exponent2 INTEGER, -- d mod (q-1) + # coefficient INTEGER, -- (inverse of q) mod p + # otherPrimeInfos OtherPrimeInfos OPTIONAL + # } + + if priv[0] != 0: + raise ValueError('Unable to read this file, version %s != 0' % priv[0]) + + as_ints = tuple(int(x) for x in priv[1:9]) + return cls(*as_ints) + + def _save_pkcs1_der(self): + """Saves the private key in PKCS#1 DER format. + + @returns: the DER-encoded private key. + """ + + from pyasn1.type import univ, namedtype + from pyasn1.codec.der import encoder + + class AsnPrivKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer()), + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + namedtype.NamedType('privateExponent', univ.Integer()), + namedtype.NamedType('prime1', univ.Integer()), + namedtype.NamedType('prime2', univ.Integer()), + namedtype.NamedType('exponent1', univ.Integer()), + namedtype.NamedType('exponent2', univ.Integer()), + namedtype.NamedType('coefficient', univ.Integer()), + ) + + # Create the ASN object + asn_key = AsnPrivKey() + asn_key.setComponentByName('version', 0) + asn_key.setComponentByName('modulus', self.n) + asn_key.setComponentByName('publicExponent', self.e) + asn_key.setComponentByName('privateExponent', self.d) + asn_key.setComponentByName('prime1', self.p) + asn_key.setComponentByName('prime2', self.q) + asn_key.setComponentByName('exponent1', self.exp1) + asn_key.setComponentByName('exponent2', self.exp2) + asn_key.setComponentByName('coefficient', self.coef) + + return encoder.encode(asn_key) + + @classmethod + def _load_pkcs1_pem(cls, keyfile): + """Loads a PKCS#1 PEM-encoded private key file. + + The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and + after the "-----END RSA PRIVATE KEY-----" lines is ignored. + + :param keyfile: contents of a PEM-encoded file that contains the private + key. + :return: a PrivateKey object + """ + + der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY')) + return cls._load_pkcs1_der(der) + + def _save_pkcs1_pem(self): + """Saves a PKCS#1 PEM-encoded private key file. + + :return: contents of a PEM-encoded file that contains the private key. + """ + + der = self._save_pkcs1_der() + return rsa.pem.save_pem(der, b('RSA PRIVATE KEY')) + + +def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): + """Returns a tuple of two different primes of nbits bits each. + + The resulting p * q has exacty 2 * nbits bits, and the returned p and q + will not be equal. + + :param nbits: the number of bits in each of p and q. + :param getprime_func: the getprime function, defaults to + :py:func:`rsa.prime.getprime`. + + *Introduced in Python-RSA 3.1* + + :param accurate: whether to enable accurate mode or not. 
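The `find_p_q` docstring above promises that p*q has exactly 2*nbits bits; the body that continues below achieves this by giving p and q deliberately unequal sizes (plus and minus nbits//16, so the primes are not close enough for Fermat-style factoring near the square root of n) and, in accurate mode, regenerating primes until the product's bit size is exact. The bit budget, sketched with an assumed nbits:

```python
# Bit-budget arithmetic behind find_p_q (values are illustrative).
nbits = 128
shift = nbits // 16                 # 8 bits of deliberate imbalance
pbits, qbits = nbits + shift, nbits - shift
assert pbits + qbits == 2 * nbits   # sizes still sum to the target

# A pbits-bit times a qbits-bit number has pbits+qbits or pbits+qbits-1
# bits -- hence the accurate-mode retry until bit_size(p * q) is exact.
assert ((1 << (pbits - 1)) * (1 << (qbits - 1))).bit_length() == 2 * nbits - 1
assert (((1 << pbits) - 1) * ((1 << qbits) - 1)).bit_length() == 2 * nbits
```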
+ :returns: (p, q), where p > q + + >>> (p, q) = find_p_q(128) + >>> from rsa import common + >>> common.bit_size(p * q) + 256 + + When not in accurate mode, the number of bits can be slightly less + + >>> (p, q) = find_p_q(128, accurate=False) + >>> from rsa import common + >>> common.bit_size(p * q) <= 256 + True + >>> common.bit_size(p * q) > 240 + True + + """ + + total_bits = nbits * 2 + + # Make sure that p and q aren't too close or the factoring programs can + # factor n. + shift = nbits // 16 + pbits = nbits + shift + qbits = nbits - shift + + # Choose the two initial primes + log.debug('find_p_q(%i): Finding p', nbits) + p = getprime_func(pbits) + log.debug('find_p_q(%i): Finding q', nbits) + q = getprime_func(qbits) + + def is_acceptable(p, q): + """Returns True iff p and q are acceptable: + + - p and q differ + - (p * q) has the right nr of bits (when accurate=True) + """ + + if p == q: + return False + + if not accurate: + return True + + # Make sure we have just the right amount of bits + found_size = rsa.common.bit_size(p * q) + return total_bits == found_size + + # Keep choosing other primes until they match our requirements. + change_p = False + while not is_acceptable(p, q): + # Change p on one iteration and q on the other + if change_p: + p = getprime_func(pbits) + else: + q = getprime_func(qbits) + + change_p = not change_p + + # We want p > q as described on + # http://www.di-mgt.com.au/rsa_alg.html#crt + return max(p, q), min(p, q) + + +def calculate_keys_custom_exponent(p, q, exponent): + """Calculates an encryption and a decryption key given p, q and an exponent, + and returns them as a tuple (e, d) + + :param p: the first large prime + :param q: the second large prime + :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int + + """ + + phi_n = (p - 1) * (q - 1) + + try: + d = rsa.common.inverse(exponent, phi_n) + except ValueError: + raise ValueError("e (%d) and phi_n (%d) are not relatively prime" % + (exponent, phi_n)) + + if (exponent * d) % phi_n != 1: + raise ValueError("e (%d) and d (%d) are not mult. inv. modulo " + "phi_n (%d)" % (exponent, d, phi_n)) + + return exponent, d + + +def calculate_keys(p, q): + """Calculates an encryption and a decryption key given p and q, and + returns them as a tuple (e, d) + + :param p: the first large prime + :param q: the second large prime + + :return: tuple (e, d) with the encryption and decryption exponents. + """ + + return calculate_keys_custom_exponent(p, q, DEFAULT_EXPONENT) + + +def gen_keys(nbits, getprime_func, accurate=True, exponent=DEFAULT_EXPONENT): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + + :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and + ``q`` will use ``nbits/2`` bits. + :param getprime_func: either :py:func:`rsa.prime.getprime` or a function + with similar signature. + :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int + """ + + # Regenerate p and q values, until calculate_keys doesn't raise a + # ValueError. 
+ while True: + (p, q) = find_p_q(nbits // 2, getprime_func, accurate) + try: + (e, d) = calculate_keys_custom_exponent(p, q, exponent=exponent) + break + except ValueError: + pass + + return p, q, e, d + + +def newkeys(nbits, accurate=True, poolsize=1, exponent=DEFAULT_EXPONENT): + """Generates public and private keys, and returns them as (pub, priv). + + The public key is also known as the 'encryption key', and is a + :py:class:`rsa.PublicKey` object. The private key is also known as the + 'decryption key' and is a :py:class:`rsa.PrivateKey` object. + + :param nbits: the number of bits required to store ``n = p*q``. + :param accurate: when True, ``n`` will have exactly the number of bits you + asked for. However, this makes key generation much slower. When False, + `n`` may have slightly less bits. + :param poolsize: the number of processes to use to generate the prime + numbers. If set to a number > 1, a parallel algorithm will be used. + This requires Python 2.6 or newer. + :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int + + :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`) + + The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires + Python 2.6 or newer. + + """ + + if nbits < 16: + raise ValueError('Key too small') + + if poolsize < 1: + raise ValueError('Pool size (%i) should be >= 1' % poolsize) + + # Determine which getprime function to use + if poolsize > 1: + from rsa import parallel + import functools + + getprime_func = functools.partial(parallel.getprime, poolsize=poolsize) + else: + getprime_func = rsa.prime.getprime + + # Generate the key components + (p, q, e, d) = gen_keys(nbits, getprime_func, accurate=accurate, exponent=exponent) + + # Create the key objects + n = p * q + + return ( + PublicKey(n, e), + PrivateKey(n, e, d, p, q) + ) + + +__all__ = ['PublicKey', 'PrivateKey', 'newkeys'] + +if __name__ == '__main__': + import doctest + + try: + for count in range(100): + (failures, tests) = doctest.testmod() + if failures: + break + + if (count and count % 10 == 0) or count == 1: + print('%i times' % count) + except KeyboardInterrupt: + print('Aborted') + else: + print('Doctests done') diff --git a/src/lib/rsa/parallel.py b/src/lib/rsa/parallel.py new file mode 100644 index 00000000..edc924fd --- /dev/null +++ b/src/lib/rsa/parallel.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions for parallel computation on multiple cores. + +Introduced in Python-RSA 3.1. + +.. note:: + + Requires Python 2.6 or newer. 
+ +""" + +from __future__ import print_function + +import multiprocessing as mp + +import rsa.prime +import rsa.randnum + + +def _find_prime(nbits, pipe): + while True: + integer = rsa.randnum.read_random_odd_int(nbits) + + # Test for primeness + if rsa.prime.is_prime(integer): + pipe.send(integer) + return + + +def getprime(nbits, poolsize): + """Returns a prime number that can be stored in 'nbits' bits. + + Works in multiple threads at the same time. + + >>> p = getprime(128, 3) + >>> rsa.prime.is_prime(p-1) + False + >>> rsa.prime.is_prime(p) + True + >>> rsa.prime.is_prime(p+1) + False + + >>> from rsa import common + >>> common.bit_size(p) == 128 + True + + """ + + (pipe_recv, pipe_send) = mp.Pipe(duplex=False) + + # Create processes + try: + procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send)) + for _ in range(poolsize)] + # Start processes + for p in procs: + p.start() + + result = pipe_recv.recv() + finally: + pipe_recv.close() + pipe_send.close() + + # Terminate processes + for p in procs: + p.terminate() + + return result + + +__all__ = ['getprime'] + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(100): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 10 == 0: + print('%i times' % count) + + print('Doctests done') diff --git a/src/lib/rsa/pem.py b/src/lib/rsa/pem.py new file mode 100644 index 00000000..0f68cb2a --- /dev/null +++ b/src/lib/rsa/pem.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions that load and write PEM-encoded files.""" + +import base64 +from rsa._compat import b, is_bytes + + +def _markers(pem_marker): + """ + Returns the start and end PEM markers + """ + + if is_bytes(pem_marker): + pem_marker = pem_marker.decode('utf-8') + + return (b('-----BEGIN %s-----' % pem_marker), + b('-----END %s-----' % pem_marker)) + + +def load_pem(contents, pem_marker): + """Loads a PEM file. + + :param contents: the contents of the file to interpret + :param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + when your file has '-----BEGIN RSA PRIVATE KEY-----' and + '-----END RSA PRIVATE KEY-----' markers. + + :return: the base64-decoded content between the start and end markers. + + @raise ValueError: when the content is invalid, for example when the start + marker cannot be found. + + """ + + # We want bytes, not text. If it's text, it can be converted to ASCII bytes. 
+ if not is_bytes(contents): + contents = contents.encode('ascii') + + (pem_start, pem_end) = _markers(pem_marker) + + pem_lines = [] + in_pem_part = False + + for line in contents.splitlines(): + line = line.strip() + + # Skip empty lines + if not line: + continue + + # Handle start marker + if line == pem_start: + if in_pem_part: + raise ValueError('Seen start marker "%s" twice' % pem_start) + + in_pem_part = True + continue + + # Skip stuff before first marker + if not in_pem_part: + continue + + # Handle end marker + if in_pem_part and line == pem_end: + in_pem_part = False + break + + # Load fields + if b(':') in line: + continue + + pem_lines.append(line) + + # Do some sanity checks + if not pem_lines: + raise ValueError('No PEM start marker "%s" found' % pem_start) + + if in_pem_part: + raise ValueError('No PEM end marker "%s" found' % pem_end) + + # Base64-decode the contents + pem = b('').join(pem_lines) + return base64.standard_b64decode(pem) + + +def save_pem(contents, pem_marker): + """Saves a PEM file. + + :param contents: the contents to encode in PEM format + :param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + when your file has '-----BEGIN RSA PRIVATE KEY-----' and + '-----END RSA PRIVATE KEY-----' markers. + + :return: the base64-encoded content between the start and end markers. + + """ + + (pem_start, pem_end) = _markers(pem_marker) + + b64 = base64.standard_b64encode(contents).replace(b('\n'), b('')) + pem_lines = [pem_start] + + for block_start in range(0, len(b64), 64): + block = b64[block_start:block_start + 64] + pem_lines.append(block) + + pem_lines.append(pem_end) + pem_lines.append(b('')) + + return b('\n').join(pem_lines) diff --git a/src/lib/rsa/pkcs1.py b/src/lib/rsa/pkcs1.py new file mode 100644 index 00000000..28f0dc54 --- /dev/null +++ b/src/lib/rsa/pkcs1.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions for PKCS#1 version 1.5 encryption and signing + +This module implements certain functionality from PKCS#1 version 1.5. For a +very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes + +At least 8 bytes of random padding is used when encrypting a message. This makes +these methods much more secure than the ones in the ``rsa`` module. + +WARNING: this module leaks information when decryption fails. The exceptions +that are raised contain the Python traceback information, which can be used to +deduce where in the process the failure occurred. DO NOT PASS SUCH INFORMATION +to your users. +""" + +import hashlib +import os + +from rsa._compat import b +from rsa import common, transform, core + +# ASN.1 codes that describe the hash algorithm used. 
+HASH_ASN1 = {
+    'MD5': b('\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'),
+    'SHA-1': b('\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'),
+    'SHA-256': b('\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'),
+    'SHA-384': b('\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'),
+    'SHA-512': b('\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'),
+}
+
+HASH_METHODS = {
+    'MD5': hashlib.md5,
+    'SHA-1': hashlib.sha1,
+    'SHA-256': hashlib.sha256,
+    'SHA-384': hashlib.sha384,
+    'SHA-512': hashlib.sha512,
+}
+
+
+class CryptoError(Exception):
+    """Base class for all exceptions in this module."""
+
+
+class DecryptionError(CryptoError):
+    """Raised when decryption fails."""
+
+
+class VerificationError(CryptoError):
+    """Raised when verification fails."""
+
+
+def _pad_for_encryption(message, target_length):
+    r"""Pads the message for encryption, returning the padded message.
+
+    :return: 00 02 RANDOM_DATA 00 MESSAGE
+
+    >>> block = _pad_for_encryption(b'hello', 16)
+    >>> len(block)
+    16
+    >>> block[0:2]
+    b'\x00\x02'
+    >>> block[-6:]
+    b'\x00hello'
+
+    """
+
+    max_msglength = target_length - 11
+    msglength = len(message)
+
+    if msglength > max_msglength:
+        raise OverflowError('%i bytes needed for message, but there is only'
+                            ' space for %i' % (msglength, max_msglength))
+
+    # Get random padding
+    padding = b('')
+    padding_length = target_length - msglength - 3
+
+    # We remove 0-bytes, so we'll end up with less padding than we've asked
+    # for, so keep adding data until we're at the correct length.
+    while len(padding) < padding_length:
+        needed_bytes = padding_length - len(padding)
+
+        # Always read at least 5 bytes more than we need, and trim off the
+        # rest after removing the 0-bytes. This increases the chance of
+        # getting enough bytes, especially when needed_bytes is small.
+        new_padding = os.urandom(needed_bytes + 5)
+        new_padding = new_padding.replace(b('\x00'), b(''))
+        padding = padding + new_padding[:needed_bytes]
+
+    assert len(padding) == padding_length
+
+    return b('').join([b('\x00\x02'),
+                       padding,
+                       b('\x00'),
+                       message])
+
+
+def _pad_for_signing(message, target_length):
+    r"""Pads the message for signing, returning the padded message.
+
+    The padding is always a repetition of FF bytes.
+
+    :return: 00 01 PADDING 00 MESSAGE
+
+    >>> block = _pad_for_signing(b'hello', 16)
+    >>> len(block)
+    16
+    >>> block[0:2]
+    b'\x00\x01'
+    >>> block[-6:]
+    b'\x00hello'
+    >>> block[2:-6]
+    b'\xff\xff\xff\xff\xff\xff\xff\xff'
+
+    """
+
+    max_msglength = target_length - 11
+    msglength = len(message)
+
+    if msglength > max_msglength:
+        raise OverflowError('%i bytes needed for message, but there is only'
+                            ' space for %i' % (msglength, max_msglength))
+
+    padding_length = target_length - msglength - 3
+
+    return b('').join([b('\x00\x01'),
+                       padding_length * b('\xff'),
+                       b('\x00'),
+                       message])
+
+
+def encrypt(message, pub_key):
+    """Encrypts the given message using PKCS#1 v1.5
+
+    :param message: the message to encrypt. Must be a byte string no longer than
+        ``k-11`` bytes, where ``k`` is the number of bytes needed to encode
+        the ``n`` component of the public key.
+    :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with.
+    :raise OverflowError: when the message is too large to fit in the padded
+        block.
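+
+    For example, with a 512-bit key ``k`` is 64, so at most 53 bytes
+    (64 - 11) can be encrypted in one block.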
+ + >>> from rsa import key, common + >>> (pub_key, priv_key) = key.newkeys(256) + >>> message = b'hello' + >>> crypto = encrypt(message, pub_key) + + The crypto text should be just as long as the public key 'n' component: + + >>> len(crypto) == common.byte_size(pub_key.n) + True + + """ + + keylength = common.byte_size(pub_key.n) + padded = _pad_for_encryption(message, keylength) + + payload = transform.bytes2int(padded) + encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n) + block = transform.int2bytes(encrypted, keylength) + + return block + + +def decrypt(crypto, priv_key): + r"""Decrypts the given message using PKCS#1 v1.5 + + The decryption is considered 'failed' when the resulting cleartext doesn't + start with the bytes 00 02, or when the 00 byte between the padding and + the message cannot be found. + + :param crypto: the crypto text as returned by :py:func:`rsa.encrypt` + :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with. + :raise DecryptionError: when the decryption fails. No details are given as + to why the code thinks the decryption fails, as this would leak + information about the private key. + + + >>> import rsa + >>> (pub_key, priv_key) = rsa.newkeys(256) + + It works with strings: + + >>> crypto = encrypt(b'hello', pub_key) + >>> decrypt(crypto, priv_key) + b'hello' + + And with binary data: + + >>> crypto = encrypt(b'\x00\x00\x00\x00\x01', pub_key) + >>> decrypt(crypto, priv_key) + b'\x00\x00\x00\x00\x01' + + Altering the encrypted information will *likely* cause a + :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use + :py:func:`rsa.sign`. + + + .. warning:: + + Never display the stack trace of a + :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the + code the exception occurred, and thus leaks information about the key. + It's only a tiny bit of information, but every bit makes cracking the + keys easier. + + >>> crypto = encrypt(b'hello', pub_key) + >>> crypto = crypto[0:5] + b'X' + crypto[6:] # change a byte + >>> decrypt(crypto, priv_key) + Traceback (most recent call last): + ... + rsa.pkcs1.DecryptionError: Decryption failed + + """ + + blocksize = common.byte_size(priv_key.n) + encrypted = transform.bytes2int(crypto) + decrypted = priv_key.blinded_decrypt(encrypted) + cleartext = transform.int2bytes(decrypted, blocksize) + + # If we can't find the cleartext marker, decryption failed. + if cleartext[0:2] != b('\x00\x02'): + raise DecryptionError('Decryption failed') + + # Find the 00 separator between the padding and the message + try: + sep_idx = cleartext.index(b('\x00'), 2) + except ValueError: + raise DecryptionError('Decryption failed') + + return cleartext[sep_idx + 1:] + + +def sign(message, priv_key, hash): + """Signs the message with the private key. + + Hashes the message, then signs the hash with the given key. This is known + as a "detached signature", because the message itself isn't altered. + + :param message: the message to sign. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param priv_key: the :py:class:`rsa.PrivateKey` to sign with + :param hash: the hash method used on the message. Use 'MD5', 'SHA-1', + 'SHA-256', 'SHA-384' or 'SHA-512'. + :return: a message signature block. + :raise OverflowError: if the private key is too small to contain the + requested hash. 
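+
+    Usage sketch (key pair assumed to come from :py:func:`rsa.newkeys`)::
+
+        signature = sign(b'message', priv_key, 'SHA-256')
+        verify(b'message', signature, pub_key)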
+ + """ + + # Get the ASN1 code for this hash method + if hash not in HASH_ASN1: + raise ValueError('Invalid hash method: %s' % hash) + asn1code = HASH_ASN1[hash] + + # Calculate the hash + hash = _hash(message, hash) + + # Encrypt the hash with the private key + cleartext = asn1code + hash + keylength = common.byte_size(priv_key.n) + padded = _pad_for_signing(cleartext, keylength) + + payload = transform.bytes2int(padded) + encrypted = priv_key.blinded_encrypt(payload) + block = transform.int2bytes(encrypted, keylength) + + return block + + +def verify(message, signature, pub_key): + """Verifies that the signature matches the message. + + The hash method is detected automatically from the signature. + + :param message: the signed message. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param signature: the signature block, as created with :py:func:`rsa.sign`. + :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. + :raise VerificationError: when the signature doesn't match the message. + + """ + + keylength = common.byte_size(pub_key.n) + encrypted = transform.bytes2int(signature) + decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n) + clearsig = transform.int2bytes(decrypted, keylength) + + # Get the hash method + method_name = _find_method_hash(clearsig) + message_hash = _hash(message, method_name) + + # Reconstruct the expected padded hash + cleartext = HASH_ASN1[method_name] + message_hash + expected = _pad_for_signing(cleartext, keylength) + + # Compare with the signed one + if expected != clearsig: + raise VerificationError('Verification failed') + + return True + + +def _hash(message, method_name): + """Returns the message digest. + + :param message: the signed message. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param method_name: the hash method, must be a key of + :py:const:`HASH_METHODS`. + + """ + + if method_name not in HASH_METHODS: + raise ValueError('Invalid hash method: %s' % method_name) + + method = HASH_METHODS[method_name] + hasher = method() + + if hasattr(message, 'read') and hasattr(message.read, '__call__'): + # Late import to prevent DeprecationWarnings. + from . import varblock + + # read as 1K blocks + for block in varblock.yield_fixedblocks(message, 1024): + hasher.update(block) + else: + # hash the message object itself. + hasher.update(message) + + return hasher.digest() + + +def _find_method_hash(clearsig): + """Finds the hash method. + + :param clearsig: full padded ASN1 and hash. + :return: the used hash method. + :raise VerificationFailed: when the hash method cannot be found + """ + + for (hashname, asn1code) in HASH_ASN1.items(): + if asn1code in clearsig: + return hashname + + raise VerificationError('Verification failed') + + +__all__ = ['encrypt', 'decrypt', 'sign', 'verify', + 'DecryptionError', 'VerificationError', 'CryptoError'] + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(1000): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 100 == 0: + print('%i times' % count) + + print('Doctests done') diff --git a/src/lib/rsa/prime.py b/src/lib/rsa/prime.py new file mode 100644 index 00000000..6f23f9da --- /dev/null +++ b/src/lib/rsa/prime.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. 
Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Numerical functions related to primes. + +Implementation based on the book Algorithm Design by Michael T. Goodrich and +Roberto Tamassia, 2002. +""" + +import rsa.randnum + +__all__ = ['getprime', 'are_relatively_prime'] + + +def gcd(p, q): + """Returns the greatest common divisor of p and q + + >>> gcd(48, 180) + 12 + """ + + while q != 0: + (p, q) = (q, p % q) + return p + + +def miller_rabin_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or prime + (which theoretically is incorrect with error probability 4**-k), by + applying Miller-Rabin primality testing. + + For reference and implementation example, see: + https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test + + :param n: Integer to be tested for primality. + :type n: int + :param k: Number of rounds (witnesses) of Miller-Rabin testing. + :type k: int + :return: False if the number is composite, True if it's probably prime. + :rtype: bool + """ + + # prevent potential infinite loop when d = 0 + if n < 2: + return False + + # Decompose (n - 1) to write it as (2 ** r) * d + # While d is even, divide it by 2 and increase the exponent. + d = n - 1 + r = 0 + + while not (d & 1): + r += 1 + d >>= 1 + + # Test k witnesses. + for _ in range(k): + # Generate random integer a, where 2 <= a <= (n - 2) + a = rsa.randnum.randint(n - 4) + 2 + + x = pow(a, d, n) + if x == 1 or x == n - 1: + continue + + for _ in range(r - 1): + x = pow(x, 2, n) + if x == 1: + # n is composite. + return False + if x == n - 1: + # Exit inner loop and continue with next witness. + break + else: + # If loop doesn't break, n is composite. + return False + + return True + + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. + + >>> is_prime(2) + True + >>> is_prime(42) + False + >>> is_prime(41) + True + >>> [x for x in range(901, 1000) if is_prime(x)] + [907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997] + """ + + # Check for small numbers. + if number < 10: + return number in [2, 3, 5, 7] + + # Check for even numbers. + if not (number & 1): + return False + + # According to NIST FIPS 186-4, Appendix C, Table C.3, minimum number of + # rounds of M-R testing, using an error probability of 2 ** (-100), for + # different p, q bitsizes are: + # * p, q bitsize: 512; rounds: 7 + # * p, q bitsize: 1024; rounds: 4 + # * p, q bitsize: 1536; rounds: 3 + # See: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + return miller_rabin_primality_testing(number, 7) + + +def getprime(nbits): + """Returns a prime number that can be stored in 'nbits' bits. 
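+
+    Candidates are drawn via rsa.randnum.read_random_odd_int and tested with
+    is_prime (Miller-Rabin) until one passes, as the loop below shows.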
+
+    >>> p = getprime(128)
+    >>> is_prime(p-1)
+    False
+    >>> is_prime(p)
+    True
+    >>> is_prime(p+1)
+    False
+
+    >>> from rsa import common
+    >>> common.bit_size(p) == 128
+    True
+    """
+
+    assert nbits > 3  # the loop will hang on too small numbers
+
+    while True:
+        integer = rsa.randnum.read_random_odd_int(nbits)
+
+        # Test for primeness
+        if is_prime(integer):
+            return integer
+
+        # Retry if not prime
+
+
+def are_relatively_prime(a, b):
+    """Returns True if a and b are relatively prime, and False if they
+    are not.
+
+    >>> are_relatively_prime(2, 3)
+    True
+    >>> are_relatively_prime(2, 4)
+    False
+    """
+
+    d = gcd(a, b)
+    return d == 1
+
+
+if __name__ == '__main__':
+    print('Running doctests 1000x or until failure')
+    import doctest
+
+    for count in range(1000):
+        (failures, tests) = doctest.testmod()
+        if failures:
+            break
+
+        if count and count % 100 == 0:
+            print('%i times' % count)
+
+    print('Doctests done')
diff --git a/src/lib/rsa/randnum.py b/src/lib/rsa/randnum.py
new file mode 100644
index 00000000..3c788a57
--- /dev/null
+++ b/src/lib/rsa/randnum.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Functions for generating random numbers."""
+
+# Source inspired by code by Yesudeep Mangalapilly
+
+import os
+
+from rsa import common, transform
+from rsa._compat import byte
+
+
+def read_random_bits(nbits):
+    """Reads 'nbits' random bits.
+
+    If nbits isn't a whole number of bytes, an extra byte will be appended with
+    only the lower bits set.
+    """
+
+    nbytes, rbits = divmod(nbits, 8)
+
+    # Get the random bytes
+    randomdata = os.urandom(nbytes)
+
+    # Add the remaining random bits
+    if rbits > 0:
+        randomvalue = ord(os.urandom(1))
+        randomvalue >>= (8 - rbits)
+        randomdata = byte(randomvalue) + randomdata
+
+    return randomdata
+
+
+def read_random_int(nbits):
+    """Reads a random integer of approximately nbits bits.
+    """
+
+    randomdata = read_random_bits(nbits)
+    value = transform.bytes2int(randomdata)
+
+    # Ensure that the number is large enough to just fill out the required
+    # number of bits.
+    value |= 1 << (nbits - 1)
+
+    return value
+
+
+def read_random_odd_int(nbits):
+    """Reads a random odd integer of approximately nbits bits.
+
+    >>> read_random_odd_int(512) & 1
+    1
+    """
+
+    value = read_random_int(nbits)
+
+    # Make sure it's odd
+    return value | 1
+
+
+def randint(maxvalue):
+    """Returns a random integer x with 1 <= x <= maxvalue
+
+    May take a very long time in specific situations. If maxvalue needs N bits
+    to store, the closer maxvalue is to (2 ** N) - 1, the faster this function
+    is.
+    """
+
+    bit_size = common.bit_size(maxvalue)
+
+    tries = 0
+    while True:
+        value = read_random_int(bit_size)
+        if value <= maxvalue:
+            break
+
+        if tries and tries % 10 == 0:
+            # After a lot of tries to get the right number of bits but still
+            # smaller than maxvalue, decrease the number of bits by 1. That'll
+            # dramatically increase the chances to get a large enough number.
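+            # Example: maxvalue = 768 needs 10 bits; read_random_int then
+            # yields values in [512, 1023], of which only 512..768 are
+            # accepted. Dropping to 9 bits yields [256, 511], which always
+            # satisfies value <= maxvalue.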
+ bit_size -= 1 + tries += 1 + + return value diff --git a/src/lib/rsa/transform.py b/src/lib/rsa/transform.py new file mode 100644 index 00000000..16061a94 --- /dev/null +++ b/src/lib/rsa/transform.py @@ -0,0 +1,224 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Data transformation functions. + +From bytes to a number, number to bytes, etc. +""" + +from __future__ import absolute_import + +try: + # We'll use psyco if available on 32-bit architectures to speed up code. + # Using psyco (if available) cuts down the execution time on Python 2.5 + # at least by half. + import psyco + + psyco.full() +except ImportError: + pass + +import binascii +from struct import pack +from rsa import common +from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE + + +def bytes2int(raw_bytes): + r"""Converts a list of bytes or an 8-bit string to an integer. + + When using unicode strings, encode it to some encoding like UTF8 first. + + >>> (((128 * 256) + 64) * 256) + 15 + 8405007 + >>> bytes2int(b'\x80@\x0f') + 8405007 + + """ + + return int(binascii.hexlify(raw_bytes), 16) + + +def _int2bytes(number, block_size=None): + r"""Converts a number to a string of bytes. + + Usage:: + + >>> _int2bytes(123456789) + b'\x07[\xcd\x15' + >>> bytes2int(_int2bytes(123456789)) + 123456789 + + >>> _int2bytes(123456789, 6) + b'\x00\x00\x07[\xcd\x15' + >>> bytes2int(_int2bytes(123456789, 128)) + 123456789 + + >>> _int2bytes(123456789, 3) + Traceback (most recent call last): + ... + OverflowError: Needed 4 bytes for number, but block size is 3 + + @param number: the number to convert + @param block_size: the number of bytes to output. If the number encoded to + bytes is less than this, the block will be zero-padded. When not given, + the returned block is not padded. + + @throws OverflowError when block_size is given and the number takes up more + bytes than fit into the block. + """ + + # Type checking + if not is_integer(number): + raise TypeError("You must pass an integer for 'number', not %s" % + number.__class__) + + if number < 0: + raise ValueError('Negative numbers cannot be used: %i' % number) + + # Do some bounds checking + if number == 0: + needed_bytes = 1 + raw_bytes = [ZERO_BYTE] + else: + needed_bytes = common.byte_size(number) + raw_bytes = [] + + # You cannot compare None > 0 in Python 3x. It will fail with a TypeError. + if block_size and block_size > 0: + if needed_bytes > block_size: + raise OverflowError('Needed %i bytes for number, but block size ' + 'is %i' % (needed_bytes, block_size)) + + # Convert the number to bytes. 
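+    # Example: 8405007 (0x80400F) yields [b'\x80', b'@', b'\x0f'] after
+    # three iterations, matching the bytes2int doctest above.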
+ while number > 0: + raw_bytes.insert(0, byte(number & 0xFF)) + number >>= 8 + + # Pad with zeroes to fill the block + if block_size and block_size > 0: + padding = (block_size - needed_bytes) * ZERO_BYTE + else: + padding = EMPTY_BYTE + + return padding + EMPTY_BYTE.join(raw_bytes) + + +def bytes_leading(raw_bytes, needle=ZERO_BYTE): + """ + Finds the number of prefixed byte occurrences in the haystack. + + Useful when you want to deal with padding. + + :param raw_bytes: + Raw bytes. + :param needle: + The byte to count. Default \000. + :returns: + The number of leading needle bytes. + """ + + leading = 0 + # Indexing keeps compatibility between Python 2.x and Python 3.x + _byte = needle[0] + for x in raw_bytes: + if x == _byte: + leading += 1 + else: + break + return leading + + +def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): + """ + Convert an unsigned integer to bytes (base-256 representation):: + + Does not preserve leading zeros if you don't specify a chunk size or + fill size. + + .. NOTE: + You must not specify both fill_size and chunk_size. Only one + of them is allowed. + + :param number: + Integer value + :param fill_size: + If the optional fill size is given the length of the resulting + byte string is expected to be the fill size and will be padded + with prefix zero bytes to satisfy that length. + :param chunk_size: + If optional chunk size is given and greater than zero, pad the front of + the byte string with binary zeros so that the length is a multiple of + ``chunk_size``. + :param overflow: + ``False`` (default). If this is ``True``, no ``OverflowError`` + will be raised when the fill_size is shorter than the length + of the generated byte sequence. Instead the byte sequence will + be returned as is. + :returns: + Raw bytes (base-256 representation). + :raises: + ``OverflowError`` when fill_size is given and the number takes up more + bytes than fit into the block. This requires the ``overflow`` + argument to this function to be set to ``False`` otherwise, no + error will be raised. + """ + + if number < 0: + raise ValueError("Number must be an unsigned integer: %d" % number) + + if fill_size and chunk_size: + raise ValueError("You can either fill or pad chunks, but not both") + + # Ensure these are integers. + number & 1 + + raw_bytes = b('') + + # Pack the integer one machine word at a time into bytes. + num = number + word_bits, _, max_uint, pack_type = get_word_alignment(num) + pack_format = ">%s" % pack_type + while num > 0: + raw_bytes = pack(pack_format, num & max_uint) + raw_bytes + num >>= word_bits + # Obtain the index of the first non-zero byte. + zero_leading = bytes_leading(raw_bytes) + if number == 0: + raw_bytes = ZERO_BYTE + # De-padding. + raw_bytes = raw_bytes[zero_leading:] + + length = len(raw_bytes) + if fill_size and fill_size > 0: + if not overflow and length > fill_size: + raise OverflowError( + "Need %d bytes for number, but fill size is %d" % + (length, fill_size) + ) + raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE) + elif chunk_size and chunk_size > 0: + remainder = length % chunk_size + if remainder: + padding_size = chunk_size - remainder + raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE) + return raw_bytes + + +if __name__ == '__main__': + import doctest + + doctest.testmod() diff --git a/src/lib/rsa/util.py b/src/lib/rsa/util.py new file mode 100644 index 00000000..29d5eb12 --- /dev/null +++ b/src/lib/rsa/util.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. 
Stüvel
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility functions."""
+
+from __future__ import with_statement, print_function
+
+import sys
+from optparse import OptionParser
+
+import rsa.key
+
+
+def private_to_public():
+    """Reads a private key and outputs the corresponding public key."""
+
+    # Parse the CLI options
+    parser = OptionParser(usage='usage: %prog [options]',
+                          description='Reads a private key and outputs the '
+                          'corresponding public key. Both private and public keys use '
+                          'the format described in PKCS#1 v1.5')
+
+    parser.add_option('-i', '--input', dest='infilename', type='string',
+                      help='Input filename. Reads from stdin if not specified')
+    parser.add_option('-o', '--output', dest='outfilename', type='string',
+                      help='Output filename. Writes to stdout if not specified')
+
+    parser.add_option('--inform', dest='inform',
+                      help='key format of input - default PEM',
+                      choices=('PEM', 'DER'), default='PEM')
+
+    parser.add_option('--outform', dest='outform',
+                      help='key format of output - default PEM',
+                      choices=('PEM', 'DER'), default='PEM')
+
+    (cli, cli_args) = parser.parse_args(sys.argv)
+
+    # Read the input data
+    if cli.infilename:
+        print('Reading private key from %s in %s format' %
+              (cli.infilename, cli.inform), file=sys.stderr)
+        with open(cli.infilename, 'rb') as infile:
+            in_data = infile.read()
+    else:
+        print('Reading private key from stdin in %s format' % cli.inform,
+              file=sys.stderr)
+        in_data = sys.stdin.read().encode('ascii')
+
+    assert type(in_data) == bytes, type(in_data)
+
+    # Take the public fields and create a public key
+    priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform)
+    pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e)
+
+    # Save to the output file
+    out_data = pub_key.save_pkcs1(cli.outform)
+
+    if cli.outfilename:
+        print('Writing public key to %s in %s format' %
+              (cli.outfilename, cli.outform), file=sys.stderr)
+        with open(cli.outfilename, 'wb') as outfile:
+            outfile.write(out_data)
+    else:
+        print('Writing public key to stdout in %s format' % cli.outform,
+              file=sys.stderr)
+        sys.stdout.write(out_data.decode('ascii'))
diff --git a/src/lib/rsa/varblock.py b/src/lib/rsa/varblock.py
new file mode 100644
index 00000000..1c8d8390
--- /dev/null
+++ b/src/lib/rsa/varblock.py
@@ -0,0 +1,179 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2011 Sybren A. Stüvel
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""VARBLOCK file support
+
+.. deprecated:: 3.4
+
+    The VARBLOCK format is NOT recommended for general use, has been deprecated since
+    Python-RSA 3.4, and will be removed in a future release. It's vulnerable to a
+    number of attacks:
+
+    1. decrypt/encrypt_bigfile() does not implement `Authenticated encryption`_ nor
+       uses MACs to verify messages before decrypting public key encrypted messages.
+
+    2. decrypt/encrypt_bigfile() does not use hybrid encryption (it uses plain RSA)
+       and has no method for chaining, so block reordering is possible.
+
+    See `issue #13 on Github`_ for more information.
+
+.. _Authenticated encryption: https://en.wikipedia.org/wiki/Authenticated_encryption
+.. _issue #13 on Github: https://github.com/sybrenstuvel/python-rsa/issues/13
+
+
+The VARBLOCK file format is as follows, where || denotes byte concatenation:
+
+    FILE := VERSION || BLOCK || BLOCK ...
+
+    BLOCK := LENGTH || DATA
+
+    LENGTH := varint-encoded length of the subsequent data. Varint comes from
+    Google Protobuf, and encodes an integer into a variable number of bytes.
+    Each byte uses the 7 lowest bits to encode the value. The highest bit set
+    to 1 indicates the next byte is also part of the varint. The last byte will
+    have this bit set to 0.
+
+This file format is called the VARBLOCK format, in line with the varint format
+used to denote the block sizes.
+
+"""
+
+import warnings
+
+from rsa._compat import byte, b
+
+ZERO_BYTE = b('\x00')
+VARBLOCK_VERSION = 1
+
+warnings.warn("The 'rsa.varblock' module was deprecated in Python-RSA version "
+              "3.4 due to security issues in the VARBLOCK format. See "
+              "https://github.com/sybrenstuvel/python-rsa/issues/13 for more information.",
+              DeprecationWarning)
+
+
+def read_varint(infile):
+    """Reads a varint from the file.
+
+    When the first byte to be read indicates EOF, (0, 0) is returned. When an
+    EOF occurs when at least one byte has been read, an EOFError exception is
+    raised.
+
+    :param infile: the file-like object to read from. It should have a read()
+        method.
+    :returns: (varint, length), the read varint and the number of read bytes.
+    """
+
+    varint = 0
+    read_bytes = 0
+
+    while True:
+        char = infile.read(1)
+        if len(char) == 0:
+            if read_bytes == 0:
+                return 0, 0
+            raise EOFError('EOF while reading varint, value is %i so far' %
+                           varint)
+
+        byte = ord(char)
+        varint += (byte & 0x7F) << (7 * read_bytes)
+
+        read_bytes += 1
+
+        if not byte & 0x80:
+            return varint, read_bytes
+
+
+def write_varint(outfile, value):
+    """Writes a varint to a file.
+
+    :param outfile: the file-like object to write to. It should have a write()
+        method.
+    :returns: the number of written bytes.
+    """
+
+    # there is a big difference between 'write the value 0' (this case) and
+    # 'there is nothing left to write' (the false-case of the while loop)
+
+    if value == 0:
+        outfile.write(ZERO_BYTE)
+        return 1
+
+    written_bytes = 0
+    while value > 0:
+        to_write = value & 0x7f
+        value >>= 7
+
+        if value > 0:
+            to_write |= 0x80
+
+        outfile.write(byte(to_write))
+        written_bytes += 1
+
+    return written_bytes
+
+
+def yield_varblocks(infile):
+    """Generator, yields each block in the input file.
+
+    :param infile: file to read, is expected to have the VARBLOCK format as
+        described in the module's docstring.
+    :yields: the contents of each block.
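+
+    For example, a 300-byte block is preceded by the varint ``AC 02``:
+    the low seven bits of 300 are 0101100 (0x2C), stored with the
+    continuation bit set as 0xAC, and the remaining bits (300 >> 7 == 2)
+    form the final byte 0x02.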
+ """ + + # Check the version number + first_char = infile.read(1) + if len(first_char) == 0: + raise EOFError('Unable to read VARBLOCK version number') + + version = ord(first_char) + if version != VARBLOCK_VERSION: + raise ValueError('VARBLOCK version %i not supported' % version) + + while True: + (block_size, read_bytes) = read_varint(infile) + + # EOF at block boundary, that's fine. + if read_bytes == 0 and block_size == 0: + break + + block = infile.read(block_size) + + read_size = len(block) + if read_size != block_size: + raise EOFError('Block size is %i, but could read only %i bytes' % + (block_size, read_size)) + + yield block + + +def yield_fixedblocks(infile, blocksize): + """Generator, yields each block of ``blocksize`` bytes in the input file. + + :param infile: file to read and separate in blocks. + :returns: a generator that yields the contents of each block + """ + + while True: + block = infile.read(blocksize) + + read_bytes = len(block) + if read_bytes == 0: + break + + yield block + + if read_bytes < blocksize: + break diff --git a/src/lib/sslcrypto/LICENSE b/src/lib/sslcrypto/LICENSE deleted file mode 100644 index 2feefc45..00000000 --- a/src/lib/sslcrypto/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -MIT License - -Copyright (c) 2019 Ivan Machugovskiy - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - -Additionally, the following licenses must be preserved: - -- ripemd implementation is licensed under BSD-3 by Markus Friedl, see `_ripemd.py`; -- jacobian curve implementation is dual-licensed under MIT or public domain license, see `_jacobian.py`. 
diff --git a/src/lib/sslcrypto/__init__.py b/src/lib/sslcrypto/__init__.py deleted file mode 100644 index 77f9b3f3..00000000 --- a/src/lib/sslcrypto/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -__all__ = ["aes", "ecc", "rsa"] - -try: - from .openssl import aes, ecc, rsa -except OSError: - from .fallback import aes, ecc, rsa diff --git a/src/lib/sslcrypto/_aes.py b/src/lib/sslcrypto/_aes.py deleted file mode 100644 index 4f8d4ec2..00000000 --- a/src/lib/sslcrypto/_aes.py +++ /dev/null @@ -1,53 +0,0 @@ -# pylint: disable=import-outside-toplevel - -class AES: - def __init__(self, backend, fallback=None): - self._backend = backend - self._fallback = fallback - - - def get_algo_key_length(self, algo): - if algo.count("-") != 2: - raise ValueError("Invalid algorithm name") - try: - return int(algo.split("-")[1]) // 8 - except ValueError: - raise ValueError("Invalid algorithm name") from None - - - def new_key(self, algo="aes-256-cbc"): - if not self._backend.is_algo_supported(algo): - if self._fallback is None: - raise ValueError("This algorithm is not supported") - return self._fallback.new_key(algo) - return self._backend.random(self.get_algo_key_length(algo)) - - - def encrypt(self, data, key, algo="aes-256-cbc"): - if not self._backend.is_algo_supported(algo): - if self._fallback is None: - raise ValueError("This algorithm is not supported") - return self._fallback.encrypt(data, key, algo) - - key_length = self.get_algo_key_length(algo) - if len(key) != key_length: - raise ValueError("Expected key to be {} bytes, got {} bytes".format(key_length, len(key))) - - return self._backend.encrypt(data, key, algo) - - - def decrypt(self, ciphertext, iv, key, algo="aes-256-cbc"): - if not self._backend.is_algo_supported(algo): - if self._fallback is None: - raise ValueError("This algorithm is not supported") - return self._fallback.decrypt(ciphertext, iv, key, algo) - - key_length = self.get_algo_key_length(algo) - if len(key) != key_length: - raise ValueError("Expected key to be {} bytes, got {} bytes".format(key_length, len(key))) - - return self._backend.decrypt(ciphertext, iv, key, algo) - - - def get_backend(self): - return self._backend.get_backend() diff --git a/src/lib/sslcrypto/_ecc.py b/src/lib/sslcrypto/_ecc.py deleted file mode 100644 index 88e04576..00000000 --- a/src/lib/sslcrypto/_ecc.py +++ /dev/null @@ -1,506 +0,0 @@ -import hashlib -import struct -import hmac -import base58 - - -try: - hashlib.new("ripemd160") -except ValueError: - # No native implementation - from . 
import _ripemd - def ripemd160(*args): - return _ripemd.new(*args) -else: - # Use OpenSSL - def ripemd160(*args): - return hashlib.new("ripemd160", *args) - - -class ECC: - # pylint: disable=line-too-long - # name: (nid, p, n, a, b, (Gx, Gy)), - CURVES = { - "secp112r1": ( - 704, - 0xDB7C2ABF62E35E668076BEAD208B, - 0xDB7C2ABF62E35E7628DFAC6561C5, - 0xDB7C2ABF62E35E668076BEAD2088, - 0x659EF8BA043916EEDE8911702B22, - ( - 0x09487239995A5EE76B55F9C2F098, - 0xA89CE5AF8724C0A23E0E0FF77500 - ) - ), - "secp112r2": ( - 705, - 0xDB7C2ABF62E35E668076BEAD208B, - 0x36DF0AAFD8B8D7597CA10520D04B, - 0x6127C24C05F38A0AAAF65C0EF02C, - 0x51DEF1815DB5ED74FCC34C85D709, - ( - 0x4BA30AB5E892B4E1649DD0928643, - 0xADCD46F5882E3747DEF36E956E97 - ) - ), - "secp128r1": ( - 706, - 0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF, - 0xFFFFFFFE0000000075A30D1B9038A115, - 0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFC, - 0xE87579C11079F43DD824993C2CEE5ED3, - ( - 0x161FF7528B899B2D0C28607CA52C5B86, - 0xCF5AC8395BAFEB13C02DA292DDED7A83 - ) - ), - "secp128r2": ( - 707, - 0xFFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF, - 0x3FFFFFFF7FFFFFFFBE0024720613B5A3, - 0xD6031998D1B3BBFEBF59CC9BBFF9AEE1, - 0x5EEEFCA380D02919DC2C6558BB6D8A5D, - ( - 0x7B6AA5D85E572983E6FB32A7CDEBC140, - 0x27B6916A894D3AEE7106FE805FC34B44 - ) - ), - "secp160k1": ( - 708, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73, - 0x0100000000000000000001B8FA16DFAB9ACA16B6B3, - 0, - 7, - ( - 0x3B4C382CE37AA192A4019E763036F4F5DD4D7EBB, - 0x938CF935318FDCED6BC28286531733C3F03C4FEE - ) - ), - "secp160r1": ( - 709, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFF, - 0x0100000000000000000001F4C8F927AED3CA752257, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFC, - 0x001C97BEFC54BD7A8B65ACF89F81D4D4ADC565FA45, - ( - 0x4A96B5688EF573284664698968C38BB913CBFC82, - 0x23A628553168947D59DCC912042351377AC5FB32 - ) - ), - "secp160r2": ( - 710, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73, - 0x0100000000000000000000351EE786A818F3A1A16B, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC70, - 0x00B4E134D3FB59EB8BAB57274904664D5AF50388BA, - ( - 0x52DCB034293A117E1F4FF11B30F7199D3144CE6D, - 0xFEAFFEF2E331F296E071FA0DF9982CFEA7D43F2E - ) - ), - "secp192k1": ( - 711, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFEE37, - 0xFFFFFFFFFFFFFFFFFFFFFFFE26F2FC170F69466A74DEFD8D, - 0, - 3, - ( - 0xDB4FF10EC057E9AE26B07D0280B7F4341DA5D1B1EAE06C7D, - 0x9B2F2F6D9C5628A7844163D015BE86344082AA88D95E2F9D - ) - ), - "prime192v1": ( - 409, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF, - 0xFFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC, - 0x64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1, - ( - 0x188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012, - 0x07192B95FFC8DA78631011ED6B24CDD573F977A11E794811 - ) - ), - "secp224k1": ( - 712, - 0x00FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFE56D, - 0x010000000000000000000000000001DCE8D2EC6184CAF0A971769FB1F7, - 0, - 5, - ( - 0xA1455B334DF099DF30FC28A169A467E9E47075A90F7E650EB6B7A45C, - 0x7E089FED7FBA344282CAFBD6F7E319F7C0B0BD59E2CA4BDB556D61A5 - ) - ), - "secp224r1": ( - 713, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000001, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFFFFFFFFFE, - 0xB4050A850C04B3ABF54132565044B0B7D7BFD8BA270B39432355FFB4, - ( - 0xB70E0CBD6BB4BF7F321390B94A03C1D356C21122343280D6115C1D21, - 0xBD376388B5F723FB4C22DFE6CD4375A05A07476444D5819985007E34 - ) - ), - "secp256k1": ( - 714, - 
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141, - 0, - 7, - ( - 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, - 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8 - ) - ), - "prime256v1": ( - 715, - 0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF, - 0xFFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551, - 0xFFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC, - 0x5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B, - ( - 0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296, - 0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5 - ) - ), - "secp384r1": ( - 716, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFF, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF581A0DB248B0A77AECEC196ACCC52973, - 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFF0000000000000000FFFFFFFC, - 0xB3312FA7E23EE7E4988E056BE3F82D19181D9C6EFE8141120314088F5013875AC656398D8A2ED19D2A85C8EDD3EC2AEF, - ( - 0xAA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A385502F25DBF55296C3A545E3872760AB7, - 0x3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C00A60B1CE1D7E819D7A431D7C90EA0E5F - ) - ), - "secp521r1": ( - 717, - 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF, - 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E91386409, - 0x01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC, - 0x0051953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F00, - ( - 0x00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5BD66, - 0x011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD16650 - ) - ) - } - # pylint: enable=line-too-long - - def __init__(self, backend, aes): - self._backend = backend - self._aes = aes - - - def get_curve(self, name): - if name not in self.CURVES: - raise ValueError("Unknown curve {}".format(name)) - nid, p, n, a, b, g = self.CURVES[name] - return EllipticCurve(self._backend(p, n, a, b, g), self._aes, nid) - - - def get_backend(self): - return self._backend.get_backend() - - -class EllipticCurve: - def __init__(self, backend, aes, nid): - self._backend = backend - self._aes = aes - self.nid = nid - - - def _encode_public_key(self, x, y, is_compressed=True, raw=True): - if raw: - if is_compressed: - return bytes([0x02 + (y[-1] % 2)]) + x - else: - return bytes([0x04]) + x + y - else: - return struct.pack("!HH", self.nid, len(x)) + x + struct.pack("!H", len(y)) + y - - - def _decode_public_key(self, public_key, partial=False): - if not public_key: - raise ValueError("No public key") - - if public_key[0] == 0x04: - # Uncompressed - expected_length = 1 + 2 * self._backend.public_key_length - if partial: - if len(public_key) < expected_length: - raise ValueError("Invalid uncompressed public key length") - else: - if len(public_key) != expected_length: - raise ValueError("Invalid uncompressed public key length") - x = public_key[1:1 + 
self._backend.public_key_length] - y = public_key[1 + self._backend.public_key_length:expected_length] - if partial: - return (x, y), expected_length - else: - return x, y - elif public_key[0] in (0x02, 0x03): - # Compressed - expected_length = 1 + self._backend.public_key_length - if partial: - if len(public_key) < expected_length: - raise ValueError("Invalid compressed public key length") - else: - if len(public_key) != expected_length: - raise ValueError("Invalid compressed public key length") - - x, y = self._backend.decompress_point(public_key[:expected_length]) - # Sanity check - if x != public_key[1:expected_length]: - raise ValueError("Incorrect compressed public key") - if partial: - return (x, y), expected_length - else: - return x, y - else: - raise ValueError("Invalid public key prefix") - - - def _decode_public_key_openssl(self, public_key, partial=False): - if not public_key: - raise ValueError("No public key") - - i = 0 - - nid, = struct.unpack("!H", public_key[i:i + 2]) - i += 2 - if nid != self.nid: - raise ValueError("Wrong curve") - - xlen, = struct.unpack("!H", public_key[i:i + 2]) - i += 2 - if len(public_key) - i < xlen: - raise ValueError("Too short public key") - x = public_key[i:i + xlen] - i += xlen - - ylen, = struct.unpack("!H", public_key[i:i + 2]) - i += 2 - if len(public_key) - i < ylen: - raise ValueError("Too short public key") - y = public_key[i:i + ylen] - i += ylen - - if partial: - return (x, y), i - else: - if i < len(public_key): - raise ValueError("Too long public key") - return x, y - - - def new_private_key(self, is_compressed=False): - return self._backend.new_private_key() + (b"\x01" if is_compressed else b"") - - - def private_to_public(self, private_key): - if len(private_key) == self._backend.public_key_length: - is_compressed = False - elif len(private_key) == self._backend.public_key_length + 1 and private_key[-1] == 1: - is_compressed = True - private_key = private_key[:-1] - else: - raise ValueError("Private key has invalid length") - x, y = self._backend.private_to_public(private_key) - return self._encode_public_key(x, y, is_compressed=is_compressed) - - - def private_to_wif(self, private_key): - return base58.b58encode_check(b"\x80" + private_key) - - - def wif_to_private(self, wif): - dec = base58.b58decode_check(wif) - if dec[0] != 0x80: - raise ValueError("Invalid network (expected mainnet)") - return dec[1:] - - - def public_to_address(self, public_key): - h = hashlib.sha256(public_key).digest() - hash160 = ripemd160(h).digest() - return base58.b58encode_check(b"\x00" + hash160) - - - def private_to_address(self, private_key): - # Kinda useless but left for quick migration from pybitcointools - return self.public_to_address(self.private_to_public(private_key)) - - - def derive(self, private_key, public_key): - if len(private_key) == self._backend.public_key_length + 1 and private_key[-1] == 1: - private_key = private_key[:-1] - if len(private_key) != self._backend.public_key_length: - raise ValueError("Private key has invalid length") - if not isinstance(public_key, tuple): - public_key = self._decode_public_key(public_key) - return self._backend.ecdh(private_key, public_key) - - - def _digest(self, data, hash): - if hash is None: - return data - elif callable(hash): - return hash(data) - elif hash == "sha1": - return hashlib.sha1(data).digest() - elif hash == "sha256": - return hashlib.sha256(data).digest() - elif hash == "sha512": - return hashlib.sha512(data).digest() - else: - raise ValueError("Unknown hash/derivation method") - 
- - # High-level functions - def encrypt(self, data, public_key, algo="aes-256-cbc", derivation="sha256", mac="hmac-sha256", return_aes_key=False): - # Generate ephemeral private key - private_key = self.new_private_key() - - # Derive key - ecdh = self.derive(private_key, public_key) - key = self._digest(ecdh, derivation) - k_enc_len = self._aes.get_algo_key_length(algo) - if len(key) < k_enc_len: - raise ValueError("Too short digest") - k_enc, k_mac = key[:k_enc_len], key[k_enc_len:] - - # Encrypt - ciphertext, iv = self._aes.encrypt(data, k_enc, algo=algo) - ephem_public_key = self.private_to_public(private_key) - ephem_public_key = self._decode_public_key(ephem_public_key) - ephem_public_key = self._encode_public_key(*ephem_public_key, raw=False) - ciphertext = iv + ephem_public_key + ciphertext - - # Add MAC tag - if callable(mac): - tag = mac(k_mac, ciphertext) - elif mac == "hmac-sha256": - h = hmac.new(k_mac, digestmod="sha256") - h.update(ciphertext) - tag = h.digest() - elif mac == "hmac-sha512": - h = hmac.new(k_mac, digestmod="sha512") - h.update(ciphertext) - tag = h.digest() - elif mac is None: - tag = b"" - else: - raise ValueError("Unsupported MAC") - - if return_aes_key: - return ciphertext + tag, k_enc - else: - return ciphertext + tag - - - def decrypt(self, ciphertext, private_key, algo="aes-256-cbc", derivation="sha256", mac="hmac-sha256"): - # Get MAC tag - if callable(mac): - tag_length = mac.digest_size - elif mac == "hmac-sha256": - tag_length = hmac.new(b"", digestmod="sha256").digest_size - elif mac == "hmac-sha512": - tag_length = hmac.new(b"", digestmod="sha512").digest_size - elif mac is None: - tag_length = 0 - else: - raise ValueError("Unsupported MAC") - - if len(ciphertext) < tag_length: - raise ValueError("Ciphertext is too small to contain MAC tag") - if tag_length == 0: - tag = b"" - else: - ciphertext, tag = ciphertext[:-tag_length], ciphertext[-tag_length:] - - orig_ciphertext = ciphertext - - if len(ciphertext) < 16: - raise ValueError("Ciphertext is too small to contain IV") - iv, ciphertext = ciphertext[:16], ciphertext[16:] - - public_key, pos = self._decode_public_key_openssl(ciphertext, partial=True) - ciphertext = ciphertext[pos:] - - # Derive key - ecdh = self.derive(private_key, public_key) - key = self._digest(ecdh, derivation) - k_enc_len = self._aes.get_algo_key_length(algo) - if len(key) < k_enc_len: - raise ValueError("Too short digest") - k_enc, k_mac = key[:k_enc_len], key[k_enc_len:] - - # Verify MAC tag - if callable(mac): - expected_tag = mac(k_mac, orig_ciphertext) - elif mac == "hmac-sha256": - h = hmac.new(k_mac, digestmod="sha256") - h.update(orig_ciphertext) - expected_tag = h.digest() - elif mac == "hmac-sha512": - h = hmac.new(k_mac, digestmod="sha512") - h.update(orig_ciphertext) - expected_tag = h.digest() - elif mac is None: - expected_tag = b"" - - if not hmac.compare_digest(tag, expected_tag): - raise ValueError("Invalid MAC tag") - - return self._aes.decrypt(ciphertext, iv, k_enc, algo=algo) - - - def sign(self, data, private_key, hash="sha256", recoverable=False, entropy=None): - if len(private_key) == self._backend.public_key_length: - is_compressed = False - elif len(private_key) == self._backend.public_key_length + 1 and private_key[-1] == 1: - is_compressed = True - private_key = private_key[:-1] - else: - raise ValueError("Private key has invalid length") - - data = self._digest(data, hash) - if not entropy: - v = b"\x01" * len(data) - k = b"\x00" * len(data) - k = hmac.new(k, v + b"\x00" + private_key + data, 
"sha256").digest() - v = hmac.new(k, v, "sha256").digest() - k = hmac.new(k, v + b"\x01" + private_key + data, "sha256").digest() - v = hmac.new(k, v, "sha256").digest() - entropy = hmac.new(k, v, "sha256").digest() - return self._backend.sign(data, private_key, recoverable, is_compressed, entropy=entropy) - - - def recover(self, signature, data, hash="sha256"): - # Sanity check: is this signature recoverable? - if len(signature) != 1 + 2 * self._backend.public_key_length: - raise ValueError("Cannot recover an unrecoverable signature") - x, y = self._backend.recover(signature, self._digest(data, hash)) - is_compressed = signature[0] >= 31 - return self._encode_public_key(x, y, is_compressed=is_compressed) - - - def verify(self, signature, data, public_key, hash="sha256"): - if len(signature) == 1 + 2 * self._backend.public_key_length: - # Recoverable signature - signature = signature[1:] - if len(signature) != 2 * self._backend.public_key_length: - raise ValueError("Invalid signature format") - if not isinstance(public_key, tuple): - public_key = self._decode_public_key(public_key) - return self._backend.verify(signature, self._digest(data, hash), public_key) - - - def derive_child(self, seed, child): - # Based on BIP32 - if not 0 <= child < 2 ** 31: - raise ValueError("Invalid child index") - return self._backend.derive_child(seed, child) diff --git a/src/lib/sslcrypto/_ripemd.py b/src/lib/sslcrypto/_ripemd.py deleted file mode 100644 index 89377cc2..00000000 --- a/src/lib/sslcrypto/_ripemd.py +++ /dev/null @@ -1,375 +0,0 @@ -# Copyright (c) 2001 Markus Friedl. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# 1. Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# 2. Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR -# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES -# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. -# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT -# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF -# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# pylint: skip-file - -import sys - -digest_size = 20 -digestsize = 20 - -class RIPEMD160: - """ - Return a new RIPEMD160 object. An optional string argument - may be provided; if present, this string will be automatically - hashed. 
- """ - - def __init__(self, arg=None): - self.ctx = RMDContext() - if arg: - self.update(arg) - self.dig = None - - def update(self, arg): - RMD160Update(self.ctx, arg, len(arg)) - self.dig = None - - def digest(self): - if self.dig: - return self.dig - ctx = self.ctx.copy() - self.dig = RMD160Final(self.ctx) - self.ctx = ctx - return self.dig - - def hexdigest(self): - dig = self.digest() - hex_digest = "" - for d in dig: - hex_digest += "%02x" % d - return hex_digest - - def copy(self): - import copy - return copy.deepcopy(self) - - - -def new(arg=None): - """ - Return a new RIPEMD160 object. An optional string argument - may be provided; if present, this string will be automatically - hashed. - """ - return RIPEMD160(arg) - - - -# -# Private. -# - -class RMDContext: - def __init__(self): - self.state = [0x67452301, 0xEFCDAB89, 0x98BADCFE, - 0x10325476, 0xC3D2E1F0] # uint32 - self.count = 0 # uint64 - self.buffer = [0] * 64 # uchar - def copy(self): - ctx = RMDContext() - ctx.state = self.state[:] - ctx.count = self.count - ctx.buffer = self.buffer[:] - return ctx - -K0 = 0x00000000 -K1 = 0x5A827999 -K2 = 0x6ED9EBA1 -K3 = 0x8F1BBCDC -K4 = 0xA953FD4E - -KK0 = 0x50A28BE6 -KK1 = 0x5C4DD124 -KK2 = 0x6D703EF3 -KK3 = 0x7A6D76E9 -KK4 = 0x00000000 - -def ROL(n, x): - return ((x << n) & 0xffffffff) | (x >> (32 - n)) - -def F0(x, y, z): - return x ^ y ^ z - -def F1(x, y, z): - return (x & y) | (((~x) % 0x100000000) & z) - -def F2(x, y, z): - return (x | ((~y) % 0x100000000)) ^ z - -def F3(x, y, z): - return (x & z) | (((~z) % 0x100000000) & y) - -def F4(x, y, z): - return x ^ (y | ((~z) % 0x100000000)) - -def R(a, b, c, d, e, Fj, Kj, sj, rj, X): - a = ROL(sj, (a + Fj(b, c, d) + X[rj] + Kj) % 0x100000000) + e - c = ROL(10, c) - return a % 0x100000000, c - -PADDING = [0x80] + [0] * 63 - -import sys -import struct - -def RMD160Transform(state, block): # uint32 state[5], uchar block[64] - x = [0] * 16 - if sys.byteorder == "little": - x = struct.unpack("<16L", bytes(block[0:64])) - else: - raise ValueError("Big-endian platforms are not supported") - a = state[0] - b = state[1] - c = state[2] - d = state[3] - e = state[4] - - # Round 1 - a, c = R(a, b, c, d, e, F0, K0, 11, 0, x) - e, b = R(e, a, b, c, d, F0, K0, 14, 1, x) - d, a = R(d, e, a, b, c, F0, K0, 15, 2, x) - c, e = R(c, d, e, a, b, F0, K0, 12, 3, x) - b, d = R(b, c, d, e, a, F0, K0, 5, 4, x) - a, c = R(a, b, c, d, e, F0, K0, 8, 5, x) - e, b = R(e, a, b, c, d, F0, K0, 7, 6, x) - d, a = R(d, e, a, b, c, F0, K0, 9, 7, x) - c, e = R(c, d, e, a, b, F0, K0, 11, 8, x) - b, d = R(b, c, d, e, a, F0, K0, 13, 9, x) - a, c = R(a, b, c, d, e, F0, K0, 14, 10, x) - e, b = R(e, a, b, c, d, F0, K0, 15, 11, x) - d, a = R(d, e, a, b, c, F0, K0, 6, 12, x) - c, e = R(c, d, e, a, b, F0, K0, 7, 13, x) - b, d = R(b, c, d, e, a, F0, K0, 9, 14, x) - a, c = R(a, b, c, d, e, F0, K0, 8, 15, x) # #15 - # Round 2 - e, b = R(e, a, b, c, d, F1, K1, 7, 7, x) - d, a = R(d, e, a, b, c, F1, K1, 6, 4, x) - c, e = R(c, d, e, a, b, F1, K1, 8, 13, x) - b, d = R(b, c, d, e, a, F1, K1, 13, 1, x) - a, c = R(a, b, c, d, e, F1, K1, 11, 10, x) - e, b = R(e, a, b, c, d, F1, K1, 9, 6, x) - d, a = R(d, e, a, b, c, F1, K1, 7, 15, x) - c, e = R(c, d, e, a, b, F1, K1, 15, 3, x) - b, d = R(b, c, d, e, a, F1, K1, 7, 12, x) - a, c = R(a, b, c, d, e, F1, K1, 12, 0, x) - e, b = R(e, a, b, c, d, F1, K1, 15, 9, x) - d, a = R(d, e, a, b, c, F1, K1, 9, 5, x) - c, e = R(c, d, e, a, b, F1, K1, 11, 2, x) - b, d = R(b, c, d, e, a, F1, K1, 7, 14, x) - a, c = R(a, b, c, d, e, F1, K1, 13, 11, x) - e, b = R(e, 
a, b, c, d, F1, K1, 12, 8, x) # #31 - # Round 3 - d, a = R(d, e, a, b, c, F2, K2, 11, 3, x) - c, e = R(c, d, e, a, b, F2, K2, 13, 10, x) - b, d = R(b, c, d, e, a, F2, K2, 6, 14, x) - a, c = R(a, b, c, d, e, F2, K2, 7, 4, x) - e, b = R(e, a, b, c, d, F2, K2, 14, 9, x) - d, a = R(d, e, a, b, c, F2, K2, 9, 15, x) - c, e = R(c, d, e, a, b, F2, K2, 13, 8, x) - b, d = R(b, c, d, e, a, F2, K2, 15, 1, x) - a, c = R(a, b, c, d, e, F2, K2, 14, 2, x) - e, b = R(e, a, b, c, d, F2, K2, 8, 7, x) - d, a = R(d, e, a, b, c, F2, K2, 13, 0, x) - c, e = R(c, d, e, a, b, F2, K2, 6, 6, x) - b, d = R(b, c, d, e, a, F2, K2, 5, 13, x) - a, c = R(a, b, c, d, e, F2, K2, 12, 11, x) - e, b = R(e, a, b, c, d, F2, K2, 7, 5, x) - d, a = R(d, e, a, b, c, F2, K2, 5, 12, x) # #47 - # Round 4 - c, e = R(c, d, e, a, b, F3, K3, 11, 1, x) - b, d = R(b, c, d, e, a, F3, K3, 12, 9, x) - a, c = R(a, b, c, d, e, F3, K3, 14, 11, x) - e, b = R(e, a, b, c, d, F3, K3, 15, 10, x) - d, a = R(d, e, a, b, c, F3, K3, 14, 0, x) - c, e = R(c, d, e, a, b, F3, K3, 15, 8, x) - b, d = R(b, c, d, e, a, F3, K3, 9, 12, x) - a, c = R(a, b, c, d, e, F3, K3, 8, 4, x) - e, b = R(e, a, b, c, d, F3, K3, 9, 13, x) - d, a = R(d, e, a, b, c, F3, K3, 14, 3, x) - c, e = R(c, d, e, a, b, F3, K3, 5, 7, x) - b, d = R(b, c, d, e, a, F3, K3, 6, 15, x) - a, c = R(a, b, c, d, e, F3, K3, 8, 14, x) - e, b = R(e, a, b, c, d, F3, K3, 6, 5, x) - d, a = R(d, e, a, b, c, F3, K3, 5, 6, x) - c, e = R(c, d, e, a, b, F3, K3, 12, 2, x) # #63 - # Round 5 - b, d = R(b, c, d, e, a, F4, K4, 9, 4, x) - a, c = R(a, b, c, d, e, F4, K4, 15, 0, x) - e, b = R(e, a, b, c, d, F4, K4, 5, 5, x) - d, a = R(d, e, a, b, c, F4, K4, 11, 9, x) - c, e = R(c, d, e, a, b, F4, K4, 6, 7, x) - b, d = R(b, c, d, e, a, F4, K4, 8, 12, x) - a, c = R(a, b, c, d, e, F4, K4, 13, 2, x) - e, b = R(e, a, b, c, d, F4, K4, 12, 10, x) - d, a = R(d, e, a, b, c, F4, K4, 5, 14, x) - c, e = R(c, d, e, a, b, F4, K4, 12, 1, x) - b, d = R(b, c, d, e, a, F4, K4, 13, 3, x) - a, c = R(a, b, c, d, e, F4, K4, 14, 8, x) - e, b = R(e, a, b, c, d, F4, K4, 11, 11, x) - d, a = R(d, e, a, b, c, F4, K4, 8, 6, x) - c, e = R(c, d, e, a, b, F4, K4, 5, 15, x) - b, d = R(b, c, d, e, a, F4, K4, 6, 13, x) # #79 - - aa = a - bb = b - cc = c - dd = d - ee = e - - a = state[0] - b = state[1] - c = state[2] - d = state[3] - e = state[4] - - # Parallel round 1 - a, c = R(a, b, c, d, e, F4, KK0, 8, 5, x) - e, b = R(e, a, b, c, d, F4, KK0, 9, 14, x) - d, a = R(d, e, a, b, c, F4, KK0, 9, 7, x) - c, e = R(c, d, e, a, b, F4, KK0, 11, 0, x) - b, d = R(b, c, d, e, a, F4, KK0, 13, 9, x) - a, c = R(a, b, c, d, e, F4, KK0, 15, 2, x) - e, b = R(e, a, b, c, d, F4, KK0, 15, 11, x) - d, a = R(d, e, a, b, c, F4, KK0, 5, 4, x) - c, e = R(c, d, e, a, b, F4, KK0, 7, 13, x) - b, d = R(b, c, d, e, a, F4, KK0, 7, 6, x) - a, c = R(a, b, c, d, e, F4, KK0, 8, 15, x) - e, b = R(e, a, b, c, d, F4, KK0, 11, 8, x) - d, a = R(d, e, a, b, c, F4, KK0, 14, 1, x) - c, e = R(c, d, e, a, b, F4, KK0, 14, 10, x) - b, d = R(b, c, d, e, a, F4, KK0, 12, 3, x) - a, c = R(a, b, c, d, e, F4, KK0, 6, 12, x) # #15 - # Parallel round 2 - e, b = R(e, a, b, c, d, F3, KK1, 9, 6, x) - d, a = R(d, e, a, b, c, F3, KK1, 13, 11, x) - c, e = R(c, d, e, a, b, F3, KK1, 15, 3, x) - b, d = R(b, c, d, e, a, F3, KK1, 7, 7, x) - a, c = R(a, b, c, d, e, F3, KK1, 12, 0, x) - e, b = R(e, a, b, c, d, F3, KK1, 8, 13, x) - d, a = R(d, e, a, b, c, F3, KK1, 9, 5, x) - c, e = R(c, d, e, a, b, F3, KK1, 11, 10, x) - b, d = R(b, c, d, e, a, F3, KK1, 7, 14, x) - a, c = R(a, b, c, d, e, F3, KK1, 7, 15, x) - e, b = R(e, a, 
b, c, d, F3, KK1, 12, 8, x) - d, a = R(d, e, a, b, c, F3, KK1, 7, 12, x) - c, e = R(c, d, e, a, b, F3, KK1, 6, 4, x) - b, d = R(b, c, d, e, a, F3, KK1, 15, 9, x) - a, c = R(a, b, c, d, e, F3, KK1, 13, 1, x) - e, b = R(e, a, b, c, d, F3, KK1, 11, 2, x) # #31 - # Parallel round 3 - d, a = R(d, e, a, b, c, F2, KK2, 9, 15, x) - c, e = R(c, d, e, a, b, F2, KK2, 7, 5, x) - b, d = R(b, c, d, e, a, F2, KK2, 15, 1, x) - a, c = R(a, b, c, d, e, F2, KK2, 11, 3, x) - e, b = R(e, a, b, c, d, F2, KK2, 8, 7, x) - d, a = R(d, e, a, b, c, F2, KK2, 6, 14, x) - c, e = R(c, d, e, a, b, F2, KK2, 6, 6, x) - b, d = R(b, c, d, e, a, F2, KK2, 14, 9, x) - a, c = R(a, b, c, d, e, F2, KK2, 12, 11, x) - e, b = R(e, a, b, c, d, F2, KK2, 13, 8, x) - d, a = R(d, e, a, b, c, F2, KK2, 5, 12, x) - c, e = R(c, d, e, a, b, F2, KK2, 14, 2, x) - b, d = R(b, c, d, e, a, F2, KK2, 13, 10, x) - a, c = R(a, b, c, d, e, F2, KK2, 13, 0, x) - e, b = R(e, a, b, c, d, F2, KK2, 7, 4, x) - d, a = R(d, e, a, b, c, F2, KK2, 5, 13, x) # #47 - # Parallel round 4 - c, e = R(c, d, e, a, b, F1, KK3, 15, 8, x) - b, d = R(b, c, d, e, a, F1, KK3, 5, 6, x) - a, c = R(a, b, c, d, e, F1, KK3, 8, 4, x) - e, b = R(e, a, b, c, d, F1, KK3, 11, 1, x) - d, a = R(d, e, a, b, c, F1, KK3, 14, 3, x) - c, e = R(c, d, e, a, b, F1, KK3, 14, 11, x) - b, d = R(b, c, d, e, a, F1, KK3, 6, 15, x) - a, c = R(a, b, c, d, e, F1, KK3, 14, 0, x) - e, b = R(e, a, b, c, d, F1, KK3, 6, 5, x) - d, a = R(d, e, a, b, c, F1, KK3, 9, 12, x) - c, e = R(c, d, e, a, b, F1, KK3, 12, 2, x) - b, d = R(b, c, d, e, a, F1, KK3, 9, 13, x) - a, c = R(a, b, c, d, e, F1, KK3, 12, 9, x) - e, b = R(e, a, b, c, d, F1, KK3, 5, 7, x) - d, a = R(d, e, a, b, c, F1, KK3, 15, 10, x) - c, e = R(c, d, e, a, b, F1, KK3, 8, 14, x) # #63 - # Parallel round 5 - b, d = R(b, c, d, e, a, F0, KK4, 8, 12, x) - a, c = R(a, b, c, d, e, F0, KK4, 5, 15, x) - e, b = R(e, a, b, c, d, F0, KK4, 12, 10, x) - d, a = R(d, e, a, b, c, F0, KK4, 9, 4, x) - c, e = R(c, d, e, a, b, F0, KK4, 12, 1, x) - b, d = R(b, c, d, e, a, F0, KK4, 5, 5, x) - a, c = R(a, b, c, d, e, F0, KK4, 14, 8, x) - e, b = R(e, a, b, c, d, F0, KK4, 6, 7, x) - d, a = R(d, e, a, b, c, F0, KK4, 8, 6, x) - c, e = R(c, d, e, a, b, F0, KK4, 13, 2, x) - b, d = R(b, c, d, e, a, F0, KK4, 6, 13, x) - a, c = R(a, b, c, d, e, F0, KK4, 5, 14, x) - e, b = R(e, a, b, c, d, F0, KK4, 15, 0, x) - d, a = R(d, e, a, b, c, F0, KK4, 13, 3, x) - c, e = R(c, d, e, a, b, F0, KK4, 11, 9, x) - b, d = R(b, c, d, e, a, F0, KK4, 11, 11, x) # #79 - - t = (state[1] + cc + d) % 0x100000000 - state[1] = (state[2] + dd + e) % 0x100000000 - state[2] = (state[3] + ee + a) % 0x100000000 - state[3] = (state[4] + aa + b) % 0x100000000 - state[4] = (state[0] + bb + c) % 0x100000000 - state[0] = t % 0x100000000 - - -def RMD160Update(ctx, inp, inplen): - if type(inp) == str: - inp = [ord(i)&0xff for i in inp] - - have = int((ctx.count // 8) % 64) - inplen = int(inplen) - need = 64 - have - ctx.count += 8 * inplen - off = 0 - if inplen >= need: - if have: - for i in range(need): - ctx.buffer[have + i] = inp[i] - RMD160Transform(ctx.state, ctx.buffer) - off = need - have = 0 - while off + 64 <= inplen: - RMD160Transform(ctx.state, inp[off:]) #<--- - off += 64 - if off < inplen: - # memcpy(ctx->buffer + have, input+off, len-off) - for i in range(inplen - off): - ctx.buffer[have + i] = inp[off + i] - -def RMD160Final(ctx): - size = struct.pack("<Q", ctx.count) - padlen = 64 - ((ctx.count // 8) % 64) - if padlen < 1 + 8: - padlen += 64 - RMD160Update(ctx, PADDING, padlen - 8) - RMD160Update(ctx, size, 8) - return bytes(struct.pack("<5L", *ctx.state)) diff --git a/src/lib/sslcrypto/fallback/__init__.py b/src/lib/sslcrypto/fallback/__init__.py deleted file mode 100644 --- a/src/lib/sslcrypto/fallback/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .aes import aes -from .ecc import ecc -from .rsa import rsa diff --git a/src/lib/sslcrypto/fallback/_jacobian.py b/src/lib/sslcrypto/fallback/_jacobian.py deleted file mode 100644 --- a/src/lib/sslcrypto/fallback/_jacobian.py +++ /dev/null -from ._util import inverse - - -class JacobianCurve: - def __init__(self, p, n, a, b, g): - self.p = p - self.n = n - self.a = a - self.b = b - self.g = g - self.n_length = len(bin(n).replace("0b", "")) - - - def isinf(self, p): - return p[0] == 0 and p[1] == 0 - - - def to_jacobian(self, p): - return p[0], p[1], 1 - - - def jacobian_double(self, p): - if not p[1]: - return 0, 0, 0 - ysq = (p[1] ** 2) % self.p - s = (4 * p[0] * ysq) % self.p - m = (3 * p[0] ** 2 + self.a * p[2] ** 4) % self.p - nx = (m ** 2 - 2 * s) % self.p - ny = (m * (s - nx) - 8 * ysq ** 2) % self.p - nz = (2 * p[1] * p[2]) % self.p - return nx, ny, nz - - - def jacobian_add(self, p, q): - if not p[1]: - return q - if not q[1]: - return p - u1 = (p[0] * q[2] ** 2) % self.p - u2 = (q[0] * p[2] ** 2) % self.p - s1 = (p[1] * q[2] ** 3) % self.p - s2 = (q[1] * p[2] ** 3) % self.p - if u1 == u2: - if s1 != s2: - return 0, 0, 1 - return self.jacobian_double(p) - h = u2 - u1 - r = s2 - s1 - h2 = (h * h) % self.p - h3 = (h * h2) % self.p - u1h2 = (u1 * h2) % self.p - nx = (r ** 2 - h3 - 2 * u1h2) % self.p - ny = (r * (u1h2 - nx) - s1 * h3) % self.p - nz = (h * p[2] * q[2]) % self.p - return nx, ny, nz - - - def from_jacobian(self, p): - z = inverse(p[2], self.p) - return (p[0] * z ** 2) % self.p, (p[1] * z ** 3) % self.p - - - def jacobian_multiply(self, a, n, secret=False): - if a[1] == 0 or n == 0: - return 0, 0, 1 - if n == 1: - return a - if n < 0 or n >= self.n: - return self.jacobian_multiply(a, n % self.n, secret) - half = self.jacobian_multiply(a, n // 2, secret) - half_sq = self.jacobian_double(half) - if 
secret: - # A constant-time implementation - half_sq_a = self.jacobian_add(half_sq, a) - if n % 2 == 0: - result = half_sq - if n % 2 == 1: - result = half_sq_a - return result - else: - if n % 2 == 0: - return half_sq - return self.jacobian_add(half_sq, a) - - - def jacobian_shamir(self, a, n, b, m): - ab = self.jacobian_add(a, b) - if n < 0 or n >= self.n: - n %= self.n - if m < 0 or m >= self.n: - m %= self.n - res = 0, 0, 1 # point on infinity - for i in range(self.n_length - 1, -1, -1): - res = self.jacobian_double(res) - has_n = n & (1 << i) - has_m = m & (1 << i) - if has_n: - if has_m == 0: - res = self.jacobian_add(res, a) - if has_m != 0: - res = self.jacobian_add(res, ab) - else: - if has_m == 0: - res = self.jacobian_add(res, (0, 0, 1)) # Try not to leak - if has_m != 0: - res = self.jacobian_add(res, b) - return res - - - def fast_multiply(self, a, n, secret=False): - return self.from_jacobian(self.jacobian_multiply(self.to_jacobian(a), n, secret)) - - - def fast_add(self, a, b): - return self.from_jacobian(self.jacobian_add(self.to_jacobian(a), self.to_jacobian(b))) - - - def fast_shamir(self, a, n, b, m): - return self.from_jacobian(self.jacobian_shamir(self.to_jacobian(a), n, self.to_jacobian(b), m)) - - - def is_on_curve(self, a): - x, y = a - # Simple arithmetic check - if (pow(x, 3, self.p) + self.a * x + self.b) % self.p != y * y % self.p: - return False - # nP = point-at-infinity - return self.isinf(self.jacobian_multiply(self.to_jacobian(a), self.n)) diff --git a/src/lib/sslcrypto/fallback/_util.py b/src/lib/sslcrypto/fallback/_util.py deleted file mode 100644 index 2236ebee..00000000 --- a/src/lib/sslcrypto/fallback/_util.py +++ /dev/null @@ -1,79 +0,0 @@ -def int_to_bytes(raw, length): - data = [] - for _ in range(length): - data.append(raw % 256) - raw //= 256 - return bytes(data[::-1]) - - -def bytes_to_int(data): - raw = 0 - for byte in data: - raw = raw * 256 + byte - return raw - - -def legendre(a, p): - res = pow(a, (p - 1) // 2, p) - if res == p - 1: - return -1 - else: - return res - - -def inverse(a, n): - if a == 0: - return 0 - lm, hm = 1, 0 - low, high = a % n, n - while low > 1: - r = high // low - nm, new = hm - lm * r, high - low * r - lm, low, hm, high = nm, new, lm, low - return lm % n - - -def square_root_mod_prime(n, p): - if n == 0: - return 0 - if p == 2: - return n # We should never get here but it might be useful - if legendre(n, p) != 1: - raise ValueError("No square root") - # Optimizations - if p % 4 == 3: - return pow(n, (p + 1) // 4, p) - # 1. By factoring out powers of 2, find Q and S such that p - 1 = - # Q * 2 ** S with Q odd - q = p - 1 - s = 0 - while q % 2 == 0: - q //= 2 - s += 1 - # 2. 
Search for z in Z/pZ which is a quadratic non-residue - z = 1 - while legendre(z, p) != -1: - z += 1 - m, c, t, r = s, pow(z, q, p), pow(n, q, p), pow(n, (q + 1) // 2, p) - while True: - if t == 0: - return 0 - elif t == 1: - return r - # Use repeated squaring to find the least i, 0 < i < M, such - # that t ** (2 ** i) = 1 - t_sq = t - i = 0 - for i in range(1, m): - t_sq = t_sq * t_sq % p - if t_sq == 1: - break - else: - raise ValueError("Should never get here") - # Let b = c ** (2 ** (m - i - 1)) - b = pow(c, 2 ** (m - i - 1), p) - m = i - c = b * b % p - t = t * b * b % p - r = r * b % p - return r diff --git a/src/lib/sslcrypto/fallback/aes.py b/src/lib/sslcrypto/fallback/aes.py deleted file mode 100644 index e168bf34..00000000 --- a/src/lib/sslcrypto/fallback/aes.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import pyaes -from .._aes import AES - - -__all__ = ["aes"] - -class AESBackend: - def _get_algo_cipher_type(self, algo): - if not algo.startswith("aes-") or algo.count("-") != 2: - raise ValueError("Unknown cipher algorithm {}".format(algo)) - key_length, cipher_type = algo[4:].split("-") - if key_length not in ("128", "192", "256"): - raise ValueError("Unknown cipher algorithm {}".format(algo)) - if cipher_type not in ("cbc", "ctr", "cfb", "ofb"): - raise ValueError("Unknown cipher algorithm {}".format(algo)) - return cipher_type - - - def is_algo_supported(self, algo): - try: - self._get_algo_cipher_type(algo) - return True - except ValueError: - return False - - - def random(self, length): - return os.urandom(length) - - - def encrypt(self, data, key, algo="aes-256-cbc"): - cipher_type = self._get_algo_cipher_type(algo) - - # Generate random IV - iv = os.urandom(16) - - if cipher_type == "cbc": - cipher = pyaes.AESModeOfOperationCBC(key, iv=iv) - elif cipher_type == "ctr": - # The IV is actually a counter, not an IV but it does almost the - # same. Notice: pyaes always uses 1 as initial counter! Make sure - # not to call pyaes directly. - - # We kinda do two conversions here: from byte array to int here, and - # from int to byte array in pyaes internals. It's possible to fix that - # but I didn't notice any performance changes so I'm keeping clean code. - iv_int = 0 - for byte in iv: - iv_int = (iv_int * 256) + byte - counter = pyaes.Counter(iv_int) - cipher = pyaes.AESModeOfOperationCTR(key, counter=counter) - elif cipher_type == "cfb": - # Change segment size from default 8 bytes to 16 bytes for OpenSSL - # compatibility - cipher = pyaes.AESModeOfOperationCFB(key, iv, segment_size=16) - elif cipher_type == "ofb": - cipher = pyaes.AESModeOfOperationOFB(key, iv) - - encrypter = pyaes.Encrypter(cipher) - ciphertext = encrypter.feed(data) - ciphertext += encrypter.feed() - return ciphertext, iv - - - def decrypt(self, ciphertext, iv, key, algo="aes-256-cbc"): - cipher_type = self._get_algo_cipher_type(algo) - - if cipher_type == "cbc": - cipher = pyaes.AESModeOfOperationCBC(key, iv=iv) - elif cipher_type == "ctr": - # The IV is actually a counter, not an IV but it does almost the - # same. Notice: pyaes always uses 1 as initial counter! Make sure - # not to call pyaes directly. - - # We kinda do two conversions here: from byte array to int here, and - # from int to byte array in pyaes internals. It's possible to fix that - # but I didn't notice any performance changes so I'm keeping clean code. 
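The CTR handling here deserves a standalone sketch: pyaes always seeds its counter at 1, so the backend folds the random 16-byte "IV" into the counter's initial value instead, reading the bytes as one big-endian integer. A minimal illustrative equivalent (not part of the deleted file; it assumes only the documented pyaes Counter/CTR API):

    import os
    import pyaes

    key = os.urandom(32)                # aes-256 key
    iv = os.urandom(16)                 # sent as the "IV", used as the counter seed
    iv_int = int.from_bytes(iv, "big")  # same value the manual byte loop computes
    cipher = pyaes.AESModeOfOperationCTR(key, counter=pyaes.Counter(iv_int))
    ciphertext = cipher.encrypt(b"attack at dawn")
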
- iv_int = 0 - for byte in iv: - iv_int = (iv_int * 256) + byte - counter = pyaes.Counter(iv_int) - cipher = pyaes.AESModeOfOperationCTR(key, counter=counter) - elif cipher_type == "cfb": - # Change segment size from default 8 bytes to 16 bytes for OpenSSL - # compatibility - cipher = pyaes.AESModeOfOperationCFB(key, iv, segment_size=16) - elif cipher_type == "ofb": - cipher = pyaes.AESModeOfOperationOFB(key, iv) - - decrypter = pyaes.Decrypter(cipher) - data = decrypter.feed(ciphertext) - data += decrypter.feed() - return data - - - def get_backend(self): - return "fallback" - - -aes = AES(AESBackend()) diff --git a/src/lib/sslcrypto/fallback/ecc.py b/src/lib/sslcrypto/fallback/ecc.py deleted file mode 100644 index 6ca9a498..00000000 --- a/src/lib/sslcrypto/fallback/ecc.py +++ /dev/null @@ -1,199 +0,0 @@ -import hmac -import os -from ._jacobian import JacobianCurve -from .._ecc import ECC -from .aes import aes -from ._util import int_to_bytes, bytes_to_int, inverse, square_root_mod_prime - - -class EllipticCurveBackend: - def __init__(self, p, n, a, b, g): - self.p, self.n, self.a, self.b, self.g = p, n, a, b, g - self.jacobian = JacobianCurve(p, n, a, b, g) - - self.public_key_length = (len(bin(p).replace("0b", "")) + 7) // 8 - self.order_bitlength = len(bin(n).replace("0b", "")) - - - def _int_to_bytes(self, raw, len=None): - return int_to_bytes(raw, len or self.public_key_length) - - - def decompress_point(self, public_key): - # Parse & load data - x = bytes_to_int(public_key[1:]) - # Calculate Y - y_square = (pow(x, 3, self.p) + self.a * x + self.b) % self.p - try: - y = square_root_mod_prime(y_square, self.p) - except Exception: - raise ValueError("Invalid public key") from None - if y % 2 != public_key[0] - 0x02: - y = self.p - y - return self._int_to_bytes(x), self._int_to_bytes(y) - - - def new_private_key(self): - while True: - private_key = os.urandom(self.public_key_length) - if bytes_to_int(private_key) >= self.n: - continue - return private_key - - - def private_to_public(self, private_key): - raw = bytes_to_int(private_key) - x, y = self.jacobian.fast_multiply(self.g, raw) - return self._int_to_bytes(x), self._int_to_bytes(y) - - - def ecdh(self, private_key, public_key): - x, y = public_key - x, y = bytes_to_int(x), bytes_to_int(y) - private_key = bytes_to_int(private_key) - x, _ = self.jacobian.fast_multiply((x, y), private_key, secret=True) - return self._int_to_bytes(x) - - - def _subject_to_int(self, subject): - return bytes_to_int(subject[:(self.order_bitlength + 7) // 8]) - - - def sign(self, subject, raw_private_key, recoverable, is_compressed, entropy): - z = self._subject_to_int(subject) - private_key = bytes_to_int(raw_private_key) - k = bytes_to_int(entropy) - - # Fix k length to prevent Minerva. Increasing multiplier by a - # multiple of order doesn't break anything. 
This fix was ported - # from python-ecdsa - ks = k + self.n - kt = ks + self.n - ks_len = len(bin(ks).replace("0b", "")) // 8 - kt_len = len(bin(kt).replace("0b", "")) // 8 - if ks_len == kt_len: - k = kt - else: - k = ks - px, py = self.jacobian.fast_multiply(self.g, k, secret=True) - - r = px % self.n - if r == 0: - # Invalid k - raise ValueError("Invalid k") - - s = (inverse(k, self.n) * (z + (private_key * r))) % self.n - if s == 0: - # Invalid k - raise ValueError("Invalid k") - - inverted = False - if s * 2 >= self.n: - s = self.n - s - inverted = True - rs_buf = self._int_to_bytes(r) + self._int_to_bytes(s) - - if recoverable: - recid = (py % 2) ^ inverted - recid += 2 * int(px // self.n) - if is_compressed: - return bytes([31 + recid]) + rs_buf - else: - if recid >= 4: - raise ValueError("Too big recovery ID, use compressed address instead") - return bytes([27 + recid]) + rs_buf - else: - return rs_buf - - - def recover(self, signature, subject): - z = self._subject_to_int(subject) - - recid = signature[0] - 27 if signature[0] < 31 else signature[0] - 31 - r = bytes_to_int(signature[1:self.public_key_length + 1]) - s = bytes_to_int(signature[self.public_key_length + 1:]) - - # Verify bounds - if not 0 <= recid < 2 * (self.p // self.n + 1): - raise ValueError("Invalid recovery ID") - if r >= self.n: - raise ValueError("r is out of bounds") - if s >= self.n: - raise ValueError("s is out of bounds") - - rinv = inverse(r, self.n) - u1 = (-z * rinv) % self.n - u2 = (s * rinv) % self.n - - # Recover R - rx = r + (recid // 2) * self.n - if rx >= self.p: - raise ValueError("Rx is out of bounds") - - # Almost copied from decompress_point - ry_square = (pow(rx, 3, self.p) + self.a * rx + self.b) % self.p - try: - ry = square_root_mod_prime(ry_square, self.p) - except Exception: - raise ValueError("Invalid recovered public key") from None - - # Ensure the point is correct - if ry % 2 != recid % 2: - # Fix Ry sign - ry = self.p - ry - - x, y = self.jacobian.fast_shamir(self.g, u1, (rx, ry), u2) - return self._int_to_bytes(x), self._int_to_bytes(y) - - - def verify(self, signature, subject, public_key): - z = self._subject_to_int(subject) - - r = bytes_to_int(signature[:self.public_key_length]) - s = bytes_to_int(signature[self.public_key_length:]) - - # Verify bounds - if r >= self.n: - raise ValueError("r is out of bounds") - if s >= self.n: - raise ValueError("s is out of bounds") - - public_key = [bytes_to_int(c) for c in public_key] - - # Ensure that the public key is correct - if not self.jacobian.is_on_curve(public_key): - raise ValueError("Public key is not on curve") - - sinv = inverse(s, self.n) - u1 = (z * sinv) % self.n - u2 = (r * sinv) % self.n - - x1, _ = self.jacobian.fast_shamir(self.g, u1, public_key, u2) - if r != x1 % self.n: - raise ValueError("Invalid signature") - - return True - - - def derive_child(self, seed, child): - # Round 1 - h = hmac.new(key=b"Bitcoin seed", msg=seed, digestmod="sha512").digest() - private_key1 = h[:32] - x, y = self.private_to_public(private_key1) - public_key1 = bytes([0x02 + (y[-1] % 2)]) + x - private_key1 = bytes_to_int(private_key1) - - # Round 2 - msg = public_key1 + self._int_to_bytes(child, 4) - h = hmac.new(key=h[32:], msg=msg, digestmod="sha512").digest() - private_key2 = bytes_to_int(h[:32]) - - return self._int_to_bytes((private_key1 + private_key2) % self.n) - - - @classmethod - def get_backend(cls): - return "fallback" - - -ecc = ECC(EllipticCurveBackend, aes) diff --git a/src/lib/sslcrypto/fallback/rsa.py 
b/src/lib/sslcrypto/fallback/rsa.py deleted file mode 100644 index 54b8d2cb..00000000 --- a/src/lib/sslcrypto/fallback/rsa.py +++ /dev/null @@ -1,8 +0,0 @@ -# pylint: disable=too-few-public-methods - -class RSA: - def get_backend(self): - return "fallback" - - -rsa = RSA() diff --git a/src/lib/sslcrypto/openssl/__init__.py b/src/lib/sslcrypto/openssl/__init__.py deleted file mode 100644 index a32ae692..00000000 --- a/src/lib/sslcrypto/openssl/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .aes import aes -from .ecc import ecc -from .rsa import rsa diff --git a/src/lib/sslcrypto/openssl/aes.py b/src/lib/sslcrypto/openssl/aes.py deleted file mode 100644 index c58451d5..00000000 --- a/src/lib/sslcrypto/openssl/aes.py +++ /dev/null @@ -1,156 +0,0 @@ -import ctypes -import threading -from .._aes import AES -from ..fallback.aes import aes as fallback_aes -from .library import lib, openssl_backend - - -# Initialize functions -try: - lib.EVP_CIPHER_CTX_new.restype = ctypes.POINTER(ctypes.c_char) -except AttributeError: - pass -lib.EVP_get_cipherbyname.restype = ctypes.POINTER(ctypes.c_char) - - -thread_local = threading.local() - - -class Context: - def __init__(self, ptr, do_free): - self.lib = lib - self.ptr = ptr - self.do_free = do_free - - - def __del__(self): - if self.do_free: - self.lib.EVP_CIPHER_CTX_free(self.ptr) - - -class AESBackend: - ALGOS = ( - "aes-128-cbc", "aes-192-cbc", "aes-256-cbc", - "aes-128-ctr", "aes-192-ctr", "aes-256-ctr", - "aes-128-cfb", "aes-192-cfb", "aes-256-cfb", - "aes-128-ofb", "aes-192-ofb", "aes-256-ofb" - ) - - def __init__(self): - self.is_supported_ctx_new = hasattr(lib, "EVP_CIPHER_CTX_new") - self.is_supported_ctx_reset = hasattr(lib, "EVP_CIPHER_CTX_reset") - - - def _get_ctx(self): - if not hasattr(thread_local, "ctx"): - if self.is_supported_ctx_new: - thread_local.ctx = Context(lib.EVP_CIPHER_CTX_new(), True) - else: - # 1 KiB ought to be enough for everybody. 
We don't know the real - # size of the context buffer because we are unsure about padding and - # pointer size - thread_local.ctx = Context(ctypes.create_string_buffer(1024), False) - return thread_local.ctx.ptr - - - def get_backend(self): - return openssl_backend - - - def _get_cipher(self, algo): - if algo not in self.ALGOS: - raise ValueError("Unknown cipher algorithm {}".format(algo)) - cipher = lib.EVP_get_cipherbyname(algo.encode()) - if not cipher: - raise ValueError("Unknown cipher algorithm {}".format(algo)) - return cipher - - - def is_algo_supported(self, algo): - try: - self._get_cipher(algo) - return True - except ValueError: - return False - - - def random(self, length): - entropy = ctypes.create_string_buffer(length) - lib.RAND_bytes(entropy, length) - return bytes(entropy) - - - def encrypt(self, data, key, algo="aes-256-cbc"): - # Initialize context - ctx = self._get_ctx() - if not self.is_supported_ctx_new: - lib.EVP_CIPHER_CTX_init(ctx) - try: - lib.EVP_EncryptInit_ex(ctx, self._get_cipher(algo), None, None, None) - - # Generate random IV - iv_length = 16 - iv = self.random(iv_length) - - # Set key and IV - lib.EVP_EncryptInit_ex(ctx, None, None, key, iv) - - # Actually encrypt - block_size = 16 - output = ctypes.create_string_buffer((len(data) // block_size + 1) * block_size) - output_len = ctypes.c_int() - - if not lib.EVP_CipherUpdate(ctx, output, ctypes.byref(output_len), data, len(data)): - raise ValueError("Could not feed cipher with data") - - new_output = ctypes.byref(output, output_len.value) - output_len2 = ctypes.c_int() - if not lib.EVP_CipherFinal_ex(ctx, new_output, ctypes.byref(output_len2)): - raise ValueError("Could not finalize cipher") - - ciphertext = output[:output_len.value + output_len2.value] - return ciphertext, iv - finally: - if self.is_supported_ctx_reset: - lib.EVP_CIPHER_CTX_reset(ctx) - else: - lib.EVP_CIPHER_CTX_cleanup(ctx) - - - def decrypt(self, ciphertext, iv, key, algo="aes-256-cbc"): - # Initialize context - ctx = self._get_ctx() - if not self.is_supported_ctx_new: - lib.EVP_CIPHER_CTX_init(ctx) - try: - lib.EVP_DecryptInit_ex(ctx, self._get_cipher(algo), None, None, None) - - # Make sure IV length is correct - iv_length = 16 - if len(iv) != iv_length: - raise ValueError("Expected IV to be {} bytes, got {} bytes".format(iv_length, len(iv))) - - # Set key and IV - lib.EVP_DecryptInit_ex(ctx, None, None, key, iv) - - # Actually decrypt - output = ctypes.create_string_buffer(len(ciphertext)) - output_len = ctypes.c_int() - - if not lib.EVP_DecryptUpdate(ctx, output, ctypes.byref(output_len), ciphertext, len(ciphertext)): - raise ValueError("Could not feed decipher with ciphertext") - - new_output = ctypes.byref(output, output_len.value) - output_len2 = ctypes.c_int() - if not lib.EVP_DecryptFinal_ex(ctx, new_output, ctypes.byref(output_len2)): - raise ValueError("Could not finalize decipher") - - return output[:output_len.value + output_len2.value] - finally: - if self.is_supported_ctx_reset: - lib.EVP_CIPHER_CTX_reset(ctx) - else: - lib.EVP_CIPHER_CTX_cleanup(ctx) - - -aes = AES(AESBackend(), fallback_aes) diff --git a/src/lib/sslcrypto/openssl/discovery.py b/src/lib/sslcrypto/openssl/discovery.py deleted file mode 100644 index 0ebb0299..00000000 --- a/src/lib/sslcrypto/openssl/discovery.py +++ /dev/null @@ -1,3 +0,0 @@ -# Can be redefined by user -def discover(): - pass \ No newline at end of file diff --git a/src/lib/sslcrypto/openssl/ecc.py b/src/lib/sslcrypto/openssl/ecc.py deleted file mode 100644 index c667be8a..00000000 --- 
a/src/lib/sslcrypto/openssl/ecc.py +++ /dev/null @@ -1,583 +0,0 @@ -import ctypes -import hmac -import threading -from .._ecc import ECC -from .aes import aes -from .library import lib, openssl_backend - - -# Initialize functions -lib.BN_new.restype = ctypes.POINTER(ctypes.c_char) -lib.BN_bin2bn.restype = ctypes.POINTER(ctypes.c_char) -lib.BN_CTX_new.restype = ctypes.POINTER(ctypes.c_char) -lib.EC_GROUP_new_curve_GFp.restype = ctypes.POINTER(ctypes.c_char) -lib.EC_KEY_new.restype = ctypes.POINTER(ctypes.c_char) -lib.EC_POINT_new.restype = ctypes.POINTER(ctypes.c_char) -lib.EC_KEY_get0_private_key.restype = ctypes.POINTER(ctypes.c_char) -lib.EVP_PKEY_new.restype = ctypes.POINTER(ctypes.c_char) -try: - lib.EVP_PKEY_CTX_new.restype = ctypes.POINTER(ctypes.c_char) -except AttributeError: - pass - - -thread_local = threading.local() - - -# This lock is required to keep ECC thread-safe. Old OpenSSL versions (before -# 1.1.0) use global objects so they aren't thread safe. Fortunately we can check -# the code to find out which functions are thread safe. -# -# For example, EC_GROUP_new_curve_GFp checks global error code to initialize -# the group, so if two errors happen at once or two threads read the error code, -# or the codes are read in the wrong order, the group is initialized in a wrong -# way. -# -# EC_KEY_new_by_curve_name calls EC_GROUP_new_curve_GFp so it's not thread -# safe. We can't use the lock because it would be too slow; instead, we use -# EC_KEY_new and then EC_KEY_set_group which calls EC_GROUP_copy instead which -# is thread safe. -lock = threading.Lock() - - -class BN: - # BN_CTX - class Context: - def __init__(self): - self.ptr = lib.BN_CTX_new() - self.lib = lib # For finalizer - - - def __del__(self): - self.lib.BN_CTX_free(self.ptr) - - - @classmethod - def get(cls): - # Get thread-safe contexf - if not hasattr(thread_local, "bn_ctx"): - thread_local.bn_ctx = cls() - return thread_local.bn_ctx.ptr - - - def __init__(self, value=None, link_only=False): - if link_only: - self.bn = value - self._free = False - else: - if value is None: - self.bn = lib.BN_new() - self._free = True - elif isinstance(value, int) and value < 256: - self.bn = lib.BN_new() - lib.BN_clear(self.bn) - lib.BN_add_word(self.bn, value) - self._free = True - else: - if isinstance(value, int): - value = value.to_bytes(128, "big") - self.bn = lib.BN_bin2bn(value, len(value), None) - self._free = True - - - def __del__(self): - if self._free: - lib.BN_free(self.bn) - - - def bytes(self, length=None): - buf = ctypes.create_string_buffer((len(self) + 7) // 8) - lib.BN_bn2bin(self.bn, buf) - buf = bytes(buf) - if length is None: - return buf - else: - if length < len(buf): - raise ValueError("Too little space for BN") - return b"\x00" * (length - len(buf)) + buf - - def __int__(self): - value = 0 - for byte in self.bytes(): - value = value * 256 + byte - return value - - def __len__(self): - return lib.BN_num_bits(self.bn) - - - def inverse(self, modulo): - result = BN() - if not lib.BN_mod_inverse(result.bn, self.bn, modulo.bn, BN.Context.get()): - raise ValueError("Could not compute inverse") - return result - - - def __floordiv__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only divide BN by BN, not {}".format(other)) - result = BN() - if not lib.BN_div(result.bn, None, self.bn, other.bn, BN.Context.get()): - raise ZeroDivisionError("Division by zero") - return result - - def __mod__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only divide BN by BN, not 
{}".format(other)) - result = BN() - if not lib.BN_div(None, result.bn, self.bn, other.bn, BN.Context.get()): - raise ZeroDivisionError("Division by zero") - return result - - def __add__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only sum BN's, not BN and {}".format(other)) - result = BN() - if not lib.BN_add(result.bn, self.bn, other.bn): - raise ValueError("Could not sum two BN's") - return result - - def __sub__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only subtract BN's, not BN and {}".format(other)) - result = BN() - if not lib.BN_sub(result.bn, self.bn, other.bn): - raise ValueError("Could not subtract BN from BN") - return result - - def __mul__(self, other): - if not isinstance(other, BN): - raise TypeError("Can only multiply BN by BN, not {}".format(other)) - result = BN() - if not lib.BN_mul(result.bn, self.bn, other.bn, BN.Context.get()): - raise ValueError("Could not multiply two BN's") - return result - - def __neg__(self): - return BN(0) - self - - - # A dirty but nice way to update current BN and free old BN at the same time - def __imod__(self, other): - res = self % other - self.bn, res.bn = res.bn, self.bn - return self - def __iadd__(self, other): - res = self + other - self.bn, res.bn = res.bn, self.bn - return self - def __isub__(self, other): - res = self - other - self.bn, res.bn = res.bn, self.bn - return self - def __imul__(self, other): - res = self * other - self.bn, res.bn = res.bn, self.bn - return self - - - def cmp(self, other): - if not isinstance(other, BN): - raise TypeError("Can only compare BN with BN, not {}".format(other)) - return lib.BN_cmp(self.bn, other.bn) - - def __eq__(self, other): - return self.cmp(other) == 0 - def __lt__(self, other): - return self.cmp(other) < 0 - def __gt__(self, other): - return self.cmp(other) > 0 - def __ne__(self, other): - return self.cmp(other) != 0 - def __le__(self, other): - return self.cmp(other) <= 0 - def __ge__(self, other): - return self.cmp(other) >= 0 - - - def __repr__(self): - return "".format(int(self)) - - def __str__(self): - return str(int(self)) - - -class EllipticCurveBackend: - def __init__(self, p, n, a, b, g): - bn_ctx = BN.Context.get() - - self.lib = lib # For finalizer - - self.p = BN(p) - self.order = BN(n) - self.a = BN(a) - self.b = BN(b) - self.h = BN((p + n // 2) // n) - - with lock: - # Thread-safety - self.group = lib.EC_GROUP_new_curve_GFp(self.p.bn, self.a.bn, self.b.bn, bn_ctx) - if not self.group: - raise ValueError("Could not create group object") - generator = self._public_key_to_point(g) - lib.EC_GROUP_set_generator(self.group, generator, self.order.bn, self.h.bn) - if not self.group: - raise ValueError("The curve is not supported by OpenSSL") - - self.public_key_length = (len(self.p) + 7) // 8 - - self.is_supported_evp_pkey_ctx = hasattr(lib, "EVP_PKEY_CTX_new") - - - def __del__(self): - self.lib.EC_GROUP_free(self.group) - - - def _private_key_to_ec_key(self, private_key): - # Thread-safety - eckey = lib.EC_KEY_new() - lib.EC_KEY_set_group(eckey, self.group) - if not eckey: - raise ValueError("Failed to allocate EC_KEY") - private_key = BN(private_key) - if not lib.EC_KEY_set_private_key(eckey, private_key.bn): - lib.EC_KEY_free(eckey) - raise ValueError("Invalid private key") - return eckey, private_key - - - def _public_key_to_point(self, public_key): - x = BN(public_key[0]) - y = BN(public_key[1]) - # EC_KEY_set_public_key_affine_coordinates is not supported by - # OpenSSL 1.0.0 so we can't use it - point = 
lib.EC_POINT_new(self.group) - if not lib.EC_POINT_set_affine_coordinates_GFp(self.group, point, x.bn, y.bn, BN.Context.get()): - raise ValueError("Could not set public key affine coordinates") - return point - - - def _public_key_to_ec_key(self, public_key): - # Thread-safety - eckey = lib.EC_KEY_new() - lib.EC_KEY_set_group(eckey, self.group) - if not eckey: - raise ValueError("Failed to allocate EC_KEY") - try: - # EC_KEY_set_public_key_affine_coordinates is not supported by - # OpenSSL 1.0.0 so we can't use it - point = self._public_key_to_point(public_key) - if not lib.EC_KEY_set_public_key(eckey, point): - raise ValueError("Could not set point") - lib.EC_POINT_free(point) - return eckey - except Exception as e: - lib.EC_KEY_free(eckey) - raise e from None - - - def _point_to_affine(self, point): - # Convert to affine coordinates - x = BN() - y = BN() - if lib.EC_POINT_get_affine_coordinates_GFp(self.group, point, x.bn, y.bn, BN.Context.get()) != 1: - raise ValueError("Failed to convert public key to affine coordinates") - # Convert to binary - if (len(x) + 7) // 8 > self.public_key_length: - raise ValueError("Public key X coordinate is too large") - if (len(y) + 7) // 8 > self.public_key_length: - raise ValueError("Public key Y coordinate is too large") - return x.bytes(self.public_key_length), y.bytes(self.public_key_length) - - - def decompress_point(self, public_key): - point = lib.EC_POINT_new(self.group) - if not point: - raise ValueError("Could not create point") - try: - if not lib.EC_POINT_oct2point(self.group, point, public_key, len(public_key), BN.Context.get()): - raise ValueError("Invalid compressed public key") - return self._point_to_affine(point) - finally: - lib.EC_POINT_free(point) - - - def new_private_key(self): - # Create random key - # Thread-safety - eckey = lib.EC_KEY_new() - lib.EC_KEY_set_group(eckey, self.group) - lib.EC_KEY_generate_key(eckey) - # To big integer - private_key = BN(lib.EC_KEY_get0_private_key(eckey), link_only=True) - # To binary - private_key_buf = private_key.bytes(self.public_key_length) - # Cleanup - lib.EC_KEY_free(eckey) - return private_key_buf - - - def private_to_public(self, private_key): - eckey, private_key = self._private_key_to_ec_key(private_key) - try: - # Derive public key - point = lib.EC_POINT_new(self.group) - try: - if not lib.EC_POINT_mul(self.group, point, private_key.bn, None, None, BN.Context.get()): - raise ValueError("Failed to derive public key") - return self._point_to_affine(point) - finally: - lib.EC_POINT_free(point) - finally: - lib.EC_KEY_free(eckey) - - - def ecdh(self, private_key, public_key): - if not self.is_supported_evp_pkey_ctx: - # Use ECDH_compute_key instead - # Create EC_KEY from private key - eckey, _ = self._private_key_to_ec_key(private_key) - try: - # Create EC_POINT from public key - point = self._public_key_to_point(public_key) - try: - key = ctypes.create_string_buffer(self.public_key_length) - if lib.ECDH_compute_key(key, self.public_key_length, point, eckey, None) == -1: - raise ValueError("Could not compute shared secret") - return bytes(key) - finally: - lib.EC_POINT_free(point) - finally: - lib.EC_KEY_free(eckey) - - # Private key: - # Create EC_KEY - eckey, _ = self._private_key_to_ec_key(private_key) - try: - # Convert to EVP_PKEY - pkey = lib.EVP_PKEY_new() - if not pkey: - raise ValueError("Could not create private key object") - try: - lib.EVP_PKEY_set1_EC_KEY(pkey, eckey) - - # Public key: - # Create EC_KEY - peer_eckey = self._public_key_to_ec_key(public_key) - try: - # Convert 
to EVP_PKEY - peer_pkey = lib.EVP_PKEY_new() - if not peer_pkey: - raise ValueError("Could not create public key object") - try: - lib.EVP_PKEY_set1_EC_KEY(peer_pkey, peer_eckey) - - # Create context - ctx = lib.EVP_PKEY_CTX_new(pkey, None) - if not ctx: - raise ValueError("Could not create EVP context") - try: - if lib.EVP_PKEY_derive_init(ctx) != 1: - raise ValueError("Could not initialize key derivation") - if not lib.EVP_PKEY_derive_set_peer(ctx, peer_pkey): - raise ValueError("Could not set peer") - - # Actually derive - key_len = ctypes.c_int(0) - lib.EVP_PKEY_derive(ctx, None, ctypes.byref(key_len)) - key = ctypes.create_string_buffer(key_len.value) - lib.EVP_PKEY_derive(ctx, key, ctypes.byref(key_len)) - - return bytes(key) - finally: - lib.EVP_PKEY_CTX_free(ctx) - finally: - lib.EVP_PKEY_free(peer_pkey) - finally: - lib.EC_KEY_free(peer_eckey) - finally: - lib.EVP_PKEY_free(pkey) - finally: - lib.EC_KEY_free(eckey) - - - def _subject_to_bn(self, subject): - return BN(subject[:(len(self.order) + 7) // 8]) - - - def sign(self, subject, private_key, recoverable, is_compressed, entropy): - z = self._subject_to_bn(subject) - private_key = BN(private_key) - k = BN(entropy) - - rp = lib.EC_POINT_new(self.group) - bn_ctx = BN.Context.get() - try: - # Fix Minerva - k1 = k + self.order - k2 = k1 + self.order - if len(k1) == len(k2): - k = k2 - else: - k = k1 - if not lib.EC_POINT_mul(self.group, rp, k.bn, None, None, bn_ctx): - raise ValueError("Could not generate R") - # Convert to affine coordinates - rx = BN() - ry = BN() - if lib.EC_POINT_get_affine_coordinates_GFp(self.group, rp, rx.bn, ry.bn, bn_ctx) != 1: - raise ValueError("Failed to convert R to affine coordinates") - r = rx % self.order - if r == BN(0): - raise ValueError("Invalid k") - # Calculate s = k^-1 * (z + r * private_key) mod n - s = (k.inverse(self.order) * (z + r * private_key)) % self.order - if s == BN(0): - raise ValueError("Invalid k") - - inverted = False - if s * BN(2) >= self.order: - s = self.order - s - inverted = True - - r_buf = r.bytes(self.public_key_length) - s_buf = s.bytes(self.public_key_length) - if recoverable: - # Generate recid - recid = int(ry % BN(2)) ^ inverted - # The line below is highly unlikely to matter in case of - # secp256k1 but might make sense for other curves - recid += 2 * int(rx // self.order) - if is_compressed: - return bytes([31 + recid]) + r_buf + s_buf - else: - if recid >= 4: - raise ValueError("Too big recovery ID, use compressed address instead") - return bytes([27 + recid]) + r_buf + s_buf - else: - return r_buf + s_buf - finally: - lib.EC_POINT_free(rp) - - - def recover(self, signature, subject): - recid = signature[0] - 27 if signature[0] < 31 else signature[0] - 31 - r = BN(signature[1:self.public_key_length + 1]) - s = BN(signature[self.public_key_length + 1:]) - - # Verify bounds - if r >= self.order: - raise ValueError("r is out of bounds") - if s >= self.order: - raise ValueError("s is out of bounds") - - bn_ctx = BN.Context.get() - - z = self._subject_to_bn(subject) - - rinv = r.inverse(self.order) - u1 = (-z * rinv) % self.order - u2 = (s * rinv) % self.order - - # Recover R - rx = r + BN(recid // 2) * self.order - if rx >= self.p: - raise ValueError("Rx is out of bounds") - rp = lib.EC_POINT_new(self.group) - if not rp: - raise ValueError("Could not create R") - try: - init_buf = b"\x02" + rx.bytes(self.public_key_length) - if not lib.EC_POINT_oct2point(self.group, rp, init_buf, len(init_buf), bn_ctx): - raise ValueError("Could not use Rx to initialize point") - 
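Both this OpenSSL-backed recover() and the pure-Python fallback deleted earlier parse the same one-byte header that sign() emits in front of r and s. A short sketch of that convention, for orientation only (the helper name is mine, not from the deleted files):

    def parse_recoverable_prefix(first_byte):
        # 27..30 signal an uncompressed key, 31..34 a compressed one; the
        # remaining 0..3 is the recovery ID, whose low bit carries the parity
        # of Ry (after low-s normalisation) and whose next bit flags the rare
        # case of Rx overflowing the group order.
        if first_byte < 31:
            return first_byte - 27, False  # (recid, is_compressed)
        return first_byte - 31, True
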
ry = BN() - if lib.EC_POINT_get_affine_coordinates_GFp(self.group, rp, None, ry.bn, bn_ctx) != 1: - raise ValueError("Failed to convert R to affine coordinates") - if int(ry % BN(2)) != recid % 2: - # Fix Ry sign - ry = self.p - ry - if lib.EC_POINT_set_affine_coordinates_GFp(self.group, rp, rx.bn, ry.bn, bn_ctx) != 1: - raise ValueError("Failed to update R coordinates") - - # Recover public key - result = lib.EC_POINT_new(self.group) - if not result: - raise ValueError("Could not create point") - try: - if not lib.EC_POINT_mul(self.group, result, u1.bn, rp, u2.bn, bn_ctx): - raise ValueError("Could not recover public key") - return self._point_to_affine(result) - finally: - lib.EC_POINT_free(result) - finally: - lib.EC_POINT_free(rp) - - - def verify(self, signature, subject, public_key): - r_raw = signature[:self.public_key_length] - r = BN(r_raw) - s = BN(signature[self.public_key_length:]) - if r >= self.order: - raise ValueError("r is out of bounds") - if s >= self.order: - raise ValueError("s is out of bounds") - - bn_ctx = BN.Context.get() - - z = self._subject_to_bn(subject) - - pub_p = lib.EC_POINT_new(self.group) - if not pub_p: - raise ValueError("Could not create public key point") - try: - init_buf = b"\x04" + public_key[0] + public_key[1] - if not lib.EC_POINT_oct2point(self.group, pub_p, init_buf, len(init_buf), bn_ctx): - raise ValueError("Could initialize point") - - sinv = s.inverse(self.order) - u1 = (z * sinv) % self.order - u2 = (r * sinv) % self.order - - # Recover public key - result = lib.EC_POINT_new(self.group) - if not result: - raise ValueError("Could not create point") - try: - if not lib.EC_POINT_mul(self.group, result, u1.bn, pub_p, u2.bn, bn_ctx): - raise ValueError("Could not recover public key") - if BN(self._point_to_affine(result)[0]) % self.order != r: - raise ValueError("Invalid signature") - return True - finally: - lib.EC_POINT_free(result) - finally: - lib.EC_POINT_free(pub_p) - - - def derive_child(self, seed, child): - # Round 1 - h = hmac.new(key=b"Bitcoin seed", msg=seed, digestmod="sha512").digest() - private_key1 = h[:32] - x, y = self.private_to_public(private_key1) - public_key1 = bytes([0x02 + (y[-1] % 2)]) + x - private_key1 = BN(private_key1) - - # Round 2 - child_bytes = [] - for _ in range(4): - child_bytes.append(child & 255) - child >>= 8 - child_bytes = bytes(child_bytes[::-1]) - msg = public_key1 + child_bytes - h = hmac.new(key=h[32:], msg=msg, digestmod="sha512").digest() - private_key2 = BN(h[:32]) - - return ((private_key1 + private_key2) % self.order).bytes(self.public_key_length) - - - @classmethod - def get_backend(cls): - return openssl_backend - - -ecc = ECC(EllipticCurveBackend, aes) diff --git a/src/lib/sslcrypto/openssl/library.py b/src/lib/sslcrypto/openssl/library.py deleted file mode 100644 index 47bedc3a..00000000 --- a/src/lib/sslcrypto/openssl/library.py +++ /dev/null @@ -1,98 +0,0 @@ -import os -import sys -import ctypes -import ctypes.util -from .discovery import discover as user_discover - - -# Disable false-positive _MEIPASS -# pylint: disable=no-member,protected-access - -# Discover OpenSSL library -def discover_paths(): - # Search local files first - if "win" in sys.platform: - # Windows - names = [ - "libeay32.dll" - ] - openssl_paths = [os.path.abspath(path) for path in names] - if hasattr(sys, "_MEIPASS"): - openssl_paths += [os.path.join(sys._MEIPASS, path) for path in openssl_paths] - openssl_paths.append(ctypes.util.find_library("libeay32")) - elif "darwin" in sys.platform: - # Mac OS - names = [ - 
"libcrypto.dylib", - "libcrypto.1.1.0.dylib", - "libcrypto.1.0.2.dylib", - "libcrypto.1.0.1.dylib", - "libcrypto.1.0.0.dylib", - "libcrypto.0.9.8.dylib" - ] - openssl_paths = [os.path.abspath(path) for path in names] - openssl_paths += names - openssl_paths += [ - "/usr/local/opt/openssl/lib/libcrypto.dylib" - ] - if hasattr(sys, "_MEIPASS") and "RESOURCEPATH" in os.environ: - openssl_paths += [ - os.path.join(os.environ["RESOURCEPATH"], "..", "Frameworks", name) - for name in names - ] - openssl_paths.append(ctypes.util.find_library("ssl")) - else: - # Linux, BSD and such - names = [ - "libcrypto.so", - "libssl.so", - "libcrypto.so.1.1.0", - "libssl.so.1.1.0", - "libcrypto.so.1.0.2", - "libssl.so.1.0.2", - "libcrypto.so.1.0.1", - "libssl.so.1.0.1", - "libcrypto.so.1.0.0", - "libssl.so.1.0.0", - "libcrypto.so.0.9.8", - "libssl.so.0.9.8" - ] - openssl_paths = [os.path.abspath(path) for path in names] - openssl_paths += names - if hasattr(sys, "_MEIPASS"): - openssl_paths += [os.path.join(sys._MEIPASS, path) for path in names] - openssl_paths.append(ctypes.util.find_library("ssl")) - lst = user_discover() - if isinstance(lst, str): - lst = [lst] - elif not lst: - lst = [] - return lst + openssl_paths - - -def discover_library(): - for path in discover_paths(): - if path: - try: - return ctypes.CDLL(path) - except OSError: - pass - raise OSError("OpenSSL is unavailable") - - -lib = discover_library() - -# Initialize internal state -try: - lib.OPENSSL_add_all_algorithms_conf() -except AttributeError: - pass - -try: - lib.OpenSSL_version.restype = ctypes.c_char_p - openssl_backend = lib.OpenSSL_version(0).decode() -except AttributeError: - lib.SSLeay_version.restype = ctypes.c_char_p - openssl_backend = lib.SSLeay_version(0).decode() - -openssl_backend += " at " + lib._name diff --git a/src/lib/sslcrypto/openssl/rsa.py b/src/lib/sslcrypto/openssl/rsa.py deleted file mode 100644 index afd8b51c..00000000 --- a/src/lib/sslcrypto/openssl/rsa.py +++ /dev/null @@ -1,11 +0,0 @@ -# pylint: disable=too-few-public-methods - -from .library import openssl_backend - - -class RSA: - def get_backend(self): - return openssl_backend - - -rsa = RSA() diff --git a/src/lib/subtl/subtl.py b/src/lib/subtl/subtl.py index cd8c5b2c..bf6acad1 100644 --- a/src/lib/subtl/subtl.py +++ b/src/lib/subtl/subtl.py @@ -1,7 +1,6 @@ ''' Based on the specification at http://bittorrent.org/beps/bep_0015.html ''' -import binascii import random import struct import time @@ -17,6 +16,19 @@ SCRAPE = 2 ERROR = 3 +def norm_info_hash(info_hash): + if len(info_hash) == 40: + info_hash = info_hash.decode('hex') + if len(info_hash) != 20: + raise UdpTrackerClientException( + 'info_hash length is not 20: {}'.format(len(info_hash))) + return info_hash + + +def info_hash_to_str(info_hash): + return binascii.hexlify(info_hash) + + class UdpTrackerClientException(Exception): pass @@ -59,10 +71,9 @@ class UdpTrackerClient: self._check_fields(args, fields) # Humans tend to use hex representations of the hash. Wasteful humans. 
- args['info_hash'] = args['info_hash'] + args['info_hash'] = norm_info_hash(args['info_hash']) values = [args[a] for a in fields.split()] - values[1] = values[1].encode("utf8") payload = struct.pack('!20s20sQQQLLLLH', *values) return self._send(ANNOUNCE, payload) @@ -72,6 +83,7 @@ class UdpTrackerClient: payload = '' for info_hash in info_hash_list: + info_hash = norm_info_hash(info_hash) payload += info_hash trans = self._send(SCRAPE, payload) @@ -99,11 +111,11 @@ class UdpTrackerClient: return trans def error(self, message): - raise Exception('error: {}'.format(message)) + print('error: {}'.format(message)) def _send(self, action, payload=None): if not payload: - payload = b'' + payload = '' trans_id, header = self._request_header(action) self.transactions[trans_id] = trans = { 'action': action, @@ -127,7 +139,7 @@ class UdpTrackerClient: elif action == SCRAPE: return self._process_scrape(payload, trans) elif action == ERROR: - return self._process_error(payload, trans) + return self._proecss_error(payload, trans) else: raise UdpTrackerClientException( 'Unknown action response: {}'.format(action)) @@ -147,10 +159,10 @@ class UdpTrackerClient: peer_data = payload[info_size:] peer_struct = '!LH' peer_size = struct.calcsize(peer_struct) - peer_count = int(len(peer_data) / peer_size) + peer_count = len(peer_data) / peer_size peers = [] - for peer_offset in range(peer_count): + for peer_offset in xrange(peer_count): off = peer_size * peer_offset peer = peer_data[off:off + peer_size] addr, port = struct.unpack(peer_struct, peer) @@ -172,7 +184,7 @@ class UdpTrackerClient: info_count = len(payload) / info_size hashes = trans['sent_hashes'] response = {} - for info_offset in range(info_count): + for info_offset in xrange(info_count): off = info_size * info_offset info = payload[off:off + info_size] seeders, completed, leechers = struct.unpack(info_struct, info) @@ -189,13 +201,13 @@ class UdpTrackerClient: it here for the possibility. 
''' self.error(payload) - return False + return payload def _generate_peer_id(self): '''http://www.bittorrent.org/beps/bep_0020.html''' peer_id = '-PU' + __version__.replace('.', '-') + '-' remaining = 20 - len(peer_id) - numbers = [str(random.randint(0, 9)) for _ in range(remaining)] + numbers = [str(random.randint(0, 9)) for _ in xrange(remaining)] peer_id += ''.join(numbers) assert(len(peer_id) == 20) return peer_id diff --git a/src/main.py b/src/main.py index ec90f4d9..fa8ef59b 100644 --- a/src/main.py +++ b/src/main.py @@ -5,25 +5,21 @@ import stat import time import logging -startup_errors = [] -def startupError(msg): - startup_errors.append(msg) - print("Startup error: %s" % msg) - # Third party modules import gevent -if gevent.version_info.major <= 1: # Workaround for random crash when libuv used with threads - try: - if "libev" not in str(gevent.config.loop): - gevent.config.loop = "libev-cext" - except Exception as err: - startupError("Unable to switch gevent loop to libev: %s" % err) -import gevent.monkey -gevent.monkey.patch_all(thread=False, subprocess=False) +from gevent import monkey +if "patch_subprocess" in dir(monkey): # New gevent + monkey.patch_all(thread=False, subprocess=False) +else: # Old gevent + import ssl + # Fix PROTOCOL_SSLv3 not defined + if "PROTOCOL_SSLv3" not in dir(ssl): + ssl.PROTOCOL_SSLv3 = ssl.PROTOCOL_SSLv23 + monkey.patch_all(thread=False) +# Not thread: pyfilesystem and systray icon, Not subprocess: Gevent 1.1+ update_after_shutdown = False # If set True then update and restart zeronet after main loop ended -restart_after_shutdown = False # If set True then restart zeronet after main loop ended # Load config from Config import config @@ -31,44 +27,76 @@ config.parse(silent=True) # Plugins need to access the configuration if not config.arguments: # Config parse failed, show the help screen and exit config.parse() +# Create necessary files and dirs +if not os.path.isdir(config.log_dir): + os.mkdir(config.log_dir) + try: + os.chmod(config.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) + except Exception, err: + print "Can't change permission of %s: %s" % (config.log_dir, err) + if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir) try: os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - except Exception as err: - startupError("Can't change permission of %s: %s" % (config.data_dir, err)) + except Exception, err: + print "Can't change permission of %s: %s" % (config.data_dir, err) if not os.path.isfile("%s/sites.json" % config.data_dir): open("%s/sites.json" % config.data_dir, "w").write("{}") if not os.path.isfile("%s/users.json" % config.data_dir): open("%s/users.json" % config.data_dir, "w").write("{}") +# Setup logging if config.action == "main": from util import helper + log_file_path = "%s/debug.log" % config.log_dir try: lock = helper.openLocked("%s/lock.pid" % config.data_dir, "w") lock.write("%s" % os.getpid()) - except BlockingIOError as err: - startupError("Can't open lock file, your ZeroNet client is probably already running, exiting... (%s)" % err) - if config.open_browser and config.open_browser != "False": - print("Opening browser: %s...", config.open_browser) + except IOError as err: + print "Can't open lock file, your ZeroNet client is probably already running, exiting... 
(%s)" % err + if config.open_browser: + print "Opening browser: %s...", config.open_browser import webbrowser - try: - if config.open_browser == "default_browser": - browser = webbrowser.get() - else: - browser = webbrowser.get(config.open_browser) - browser.open("http://%s:%s/%s" % ( - config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage - ), new=2) - except Exception as err: - startupError("Error starting browser: %s" % err) + if config.open_browser == "default_browser": + browser = webbrowser.get() + else: + browser = webbrowser.get(config.open_browser) + browser.open("http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage), new=2) sys.exit() -config.initLogging() + if os.path.isfile("%s/debug.log" % config.log_dir): # Simple logrotate + if os.path.isfile("%s/debug-last.log" % config.log_dir): + os.unlink("%s/debug-last.log" % config.log_dir) + os.rename("%s/debug.log" % config.log_dir, "%s/debug-last.log" % config.log_dir) + logging.basicConfig( + format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', + level=logging.DEBUG, stream=open(log_file_path, "a") + ) +else: + log_file_path = "%s/cmd.log" % config.log_dir + logging.basicConfig( + format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', + level=logging.DEBUG, stream=open(log_file_path, "w") + ) + +# Console logger +console_log = logging.StreamHandler() +if config.action == "main": # Add time if main action + console_log.setFormatter(logging.Formatter('[%(asctime)s] %(name)s %(message)s', "%H:%M:%S")) +else: + console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S")) + +logging.getLogger('').addHandler(console_log) # Add console logger +logging.getLogger('').name = "-" # Remove root prefix # Debug dependent configuration from Debug import DebugHook +if config.debug: + console_log.setLevel(logging.DEBUG) # Display everything to console +else: + console_log.setLevel(logging.INFO) # Display only important info to console # Load plugins from Plugin import PluginManager @@ -88,27 +116,17 @@ if config.stack_size: if config.msgpack_purepython: os.environ["MSGPACK_PUREPYTHON"] = "True" -# Fix console encoding on Windows -if sys.platform.startswith("win"): - import subprocess - try: - chcp_res = subprocess.check_output("chcp 65001", shell=True).decode(errors="ignore").strip() - logging.debug("Changed console encoding to utf8: %s" % chcp_res) - except Exception as err: - logging.error("Error changing console encoding to utf8: %s" % err) - # Socket monkey patch if config.proxy: from util import SocksProxy - import urllib.request + import urllib2 logging.info("Patching sockets to socks proxy: %s" % config.proxy) if config.fileserver_ip == "*": config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost - config.disable_udp = True # UDP not supported currently with proxy SocksProxy.monkeyPatch(*config.proxy.split(":")) elif config.tor == "always": from util import SocksProxy - import urllib.request + import urllib2 logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy) if config.fileserver_ip == "*": config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost @@ -130,9 +148,7 @@ class Actions(object): logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__)) func = getattr(self, function_name, None) - back = func(**kwargs) - if back: - print(back) + func(**kwargs) # Default action: Start serving 
UiServer and FileServer def main(self): @@ -143,10 +159,6 @@ class Actions(object): file_server = FileServer() logging.info("Creating UiServer....") ui_server = UiServer() - file_server.ui_server = ui_server - - for startup_error in startup_errors: - logging.error("Startup error: %s" % startup_error) logging.info("Removing old SSL certs...") from Crypt import CryptConnection @@ -154,38 +166,28 @@ class Actions(object): logging.info("Starting servers....") gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)]) - logging.info("All server stopped") # Site commands - def siteCreate(self, use_master_seed=True): - logging.info("Generating new privatekey (use_master_seed: %s)..." % config.use_master_seed) + def siteCreate(self): + logging.info("Generating new privatekey...") from Crypt import CryptBitcoin - if use_master_seed: - from User import UserManager - user = UserManager.user_manager.get() - if not user: - user = UserManager.user_manager.create() - address, address_index, site_data = user.getNewSiteData() - privatekey = site_data["privatekey"] - logging.info("Generated using master seed from users.json, site index: %s" % address_index) - else: - privatekey = CryptBitcoin.newPrivatekey() - address = CryptBitcoin.privatekeyToAddress(privatekey) + privatekey = CryptBitcoin.newPrivatekey() logging.info("----------------------------------------------------------------------") logging.info("Site private key: %s" % privatekey) logging.info(" !!! ^ Save it now, required to modify the site ^ !!!") + address = CryptBitcoin.privatekeyToAddress(privatekey) logging.info("Site address: %s" % address) logging.info("----------------------------------------------------------------------") - while True and not config.batch and not use_master_seed: - if input("? Have you secured your private key? (yes, no) > ").lower() == "yes": + while True and not config.batch: + if raw_input("? Have you secured your private key? (yes, no) > ").lower() == "yes": break else: logging.info("Please, secure it now, you going to need it to modify your site!") logging.info("Creating directory structure...") - from Site.Site import Site + from Site import Site from Site import SiteManager SiteManager.site_manager.load() @@ -194,20 +196,15 @@ class Actions(object): logging.info("Creating content.json...") site = Site(address) - extend = {"postmessage_nonce_security": True} - if use_master_seed: - extend["address_index"] = address_index - - site.content_manager.sign(privatekey=privatekey, extend=extend) + site.content_manager.sign(privatekey=privatekey, extend={"postmessage_nonce_security": True}) site.settings["own"] = True site.saveSettings() logging.info("Site created!") def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False): - from Site.Site import Site + from Site import Site from Site import SiteManager - from Debug import Debug SiteManager.site_manager.load() logging.info("Signing site: %s..." 
% address) site = Site(address, allow_create=False) @@ -224,20 +221,14 @@ class Actions(object): # Not found in users.json, ask from console import getpass privatekey = getpass.getpass("Private key (input hidden):") - try: - succ = site.content_manager.sign( - inner_path=inner_path, privatekey=privatekey, - update_changed_files=True, remove_missing_optional=remove_missing_optional - ) - except Exception as err: - logging.error("Sign error: %s" % Debug.formatException(err)) - succ = False + diffs = site.content_manager.getDiffs(inner_path) + succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True, remove_missing_optional=remove_missing_optional) if succ and publish: - self.sitePublish(address, inner_path=inner_path) + self.sitePublish(address, inner_path=inner_path, diffs=diffs) def siteVerify(self, address): import time - from Site.Site import Site + from Site import Site from Site import SiteManager SiteManager.site_manager.load() @@ -249,45 +240,36 @@ class Actions(object): for content_inner_path in site.content_manager.contents: s = time.time() logging.info("Verifing %s signature..." % content_inner_path) - err = None - try: - file_correct = site.content_manager.verifyFile( - content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False - ) - except Exception as exp: - file_correct = False - err = exp - + file_correct = site.content_manager.verifyFile( + content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False + ) if file_correct is True: logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s)) else: - logging.error("[ERROR] %s: invalid file: %s!" % (content_inner_path, err)) - input("Continue?") + logging.error("[ERROR] %s: invalid file!" % content_inner_path) + raw_input("Continue?") bad_files += content_inner_path logging.info("Verifying site files...") - bad_files += site.storage.verifyFiles()["bad_files"] + bad_files += site.storage.verifyFiles() if not bad_files: logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s)) else: logging.error("[ERROR] Error during verifying site files!") def dbRebuild(self, address): - from Site.Site import Site + from Site import Site from Site import SiteManager SiteManager.site_manager.load() logging.info("Rebuilding site sql cache: %s..." % address) site = SiteManager.site_manager.get(address) s = time.time() - try: - site.storage.rebuildDb() - logging.info("Done in %.3fs" % (time.time() - s)) - except Exception as err: - logging.error(err) + site.storage.rebuildDb() + logging.info("Done in %.3fs" % (time.time() - s)) def dbQuery(self, address, query): - from Site.Site import Site + from Site import Site from Site import SiteManager SiteManager.site_manager.load() @@ -296,37 +278,30 @@ class Actions(object): result = [] for row in site.storage.query(query): result.append(dict(row)) - print(json.dumps(result, indent=4)) + print json.dumps(result, indent=4) def siteAnnounce(self, address): from Site.Site import Site from Site import SiteManager SiteManager.site_manager.load() - logging.info("Opening a simple connection server") - global file_server - from File import FileServer - file_server = FileServer("127.0.0.1", 1234) - file_server.start() - logging.info("Announcing site %s to tracker..." 
% address) site = Site(address) s = time.time() site.announce() - print("Response time: %.3fs" % (time.time() - s)) - print(site.peers) + print "Response time: %.3fs" % (time.time() - s) + print site.peers def siteDownload(self, address): - from Site.Site import Site + from Site import Site from Site import SiteManager SiteManager.site_manager.load() logging.info("Opening a simple connection server") global file_server - from File import FileServer - file_server = FileServer("127.0.0.1", 1234) - file_server_thread = gevent.spawn(file_server.start, check_sites=False) + from Connection import ConnectionServer + file_server = ConnectionServer("127.0.0.1", 1234) site = Site(address) @@ -336,17 +311,19 @@ class Actions(object): evt.set(True) site.onComplete.once(lambda: onComplete(on_completed)) - print("Announcing...") + print "Announcing..." site.announce() s = time.time() - print("Downloading...") + print "Downloading..." site.downloadContent("content.json", check_modifications=True) - print("Downloaded in %.3fs" % (time.time()-s)) + print on_completed.get() + print "Downloaded in %.3fs" % (time.time()-s) + def siteNeedFile(self, address, inner_path): - from Site.Site import Site + from Site import Site from Site import SiteManager SiteManager.site_manager.load() @@ -354,86 +331,45 @@ class Actions(object): while 1: s = time.time() time.sleep(1) - print("Switch time:", time.time() - s) + print "Switch time:", time.time() - s gevent.spawn(checker) logging.info("Opening a simple connection server") global file_server - from File import FileServer - file_server = FileServer("127.0.0.1", 1234) - file_server_thread = gevent.spawn(file_server.start, check_sites=False) + from Connection import ConnectionServer + file_server = ConnectionServer("127.0.0.1", 1234) site = Site(address) site.announce() - print(site.needFile(inner_path, update=True)) + print site.needFile(inner_path, update=True) - def siteCmd(self, address, cmd, parameters): - import json - from Site import SiteManager - - site = SiteManager.site_manager.get(address) - - if not site: - logging.error("Site not found: %s" % address) - return None - - ws = self.getWebsocket(site) - - ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1})) - res_raw = ws.recv() - - try: - res = json.loads(res_raw) - except Exception as err: - return {"error": "Invalid result: %s" % err, "res_raw": res_raw} - - if "result" in res: - return res["result"] - else: - return res - - def getWebsocket(self, site): - import websocket - - ws_address = "ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"]) - logging.info("Connecting to %s" % ws_address) - ws = websocket.create_connection(ws_address) - return ws - - def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"): + def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", diffs={}): global file_server - from Site.Site import Site + from Site import Site from Site import SiteManager from File import FileServer # We need fileserver to handle incoming file requests from Peer import Peer - file_server = FileServer() - site = SiteManager.site_manager.get(address) + SiteManager.site_manager.load() + logging.info("Loading site...") + site = Site(address, allow_create=False) site.settings["serving"] = True # Serving the site even if its disabled - try: - ws = self.getWebsocket(site) - logging.info("Sending siteReload") - self.siteCmd(address, "siteReload", inner_path) - - logging.info("Sending 
sitePublish") - self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False}) - logging.info("Done.") - - except Exception as err: - logging.info("Can't connect to local websocket client: %s" % err) - logging.info("Creating FileServer....") - file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity - time.sleep(0.001) + logging.info("Creating FileServer....") + file_server = FileServer() + site.connection_server = file_server + file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity + time.sleep(0.001) + if not file_server_thread.ready(): # Started fileserver - file_server.portCheck() + file_server.openport() if peer_ip: # Announce ip specificed site.addPeer(peer_ip, peer_port) else: # Just ask the tracker logging.info("Gathering peers from tracker") site.announce() # Gather peers - published = site.publish(10, inner_path) # Push to peers + published = site.publish(5, inner_path, diffs=diffs) # Push to peers if published > 0: time.sleep(3) logging.info("Serving files (max 60s)...") @@ -441,6 +377,18 @@ class Actions(object): logging.info("Done.") else: logging.info("No peers found, sitePublish command only works if you already have visitors serving your site") + else: + # Already running, notify local client on new content + logging.info("Sending siteReload") + if config.fileserver_ip == "*": + my_peer = Peer("127.0.0.1", config.fileserver_port) + else: + my_peer = Peer(config.fileserver_ip, config.fileserver_port) + + logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path})) + logging.info("Sending sitePublish") + logging.info(my_peer.request("sitePublish", {"site": site.address, "inner_path": inner_path, "diffs": diffs})) + logging.info("Done.") # Crypto commands def cryptPrivatekeyToAddress(self, privatekey=None): @@ -449,63 +397,34 @@ class Actions(object): import getpass privatekey = getpass.getpass("Private key (input hidden):") - print(CryptBitcoin.privatekeyToAddress(privatekey)) + print CryptBitcoin.privatekeyToAddress(privatekey) def cryptSign(self, message, privatekey): from Crypt import CryptBitcoin - print(CryptBitcoin.sign(message, privatekey)) - - def cryptVerify(self, message, sign, address): - from Crypt import CryptBitcoin - print(CryptBitcoin.verify(message, address, sign)) - - def cryptGetPrivatekey(self, master_seed, site_address_index=None): - from Crypt import CryptBitcoin - if len(master_seed) != 64: - logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed)) - return False - privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index) - print("Requested private key: %s" % privatekey) + print CryptBitcoin.sign(message, privatekey) # Peer def peerPing(self, peer_ip, peer_port=None): if not peer_port: - peer_port = 15441 + peer_port = config.fileserver_port logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) - file_server.start(check_connections=False) from Crypt import CryptConnection CryptConnection.manager.loadCerts() from Peer import Peer logging.info("Pinging 5 times peer: %s:%s..." 
% (peer_ip, int(peer_port))) - s = time.time() peer = Peer(peer_ip, peer_port) - peer.connect() - - if not peer.connection: - print("Error: Can't connect to peer (connection error: %s)" % peer.connection_error) - return False - if "shared_ciphers" in dir(peer.connection.sock): - print("Shared ciphers:", peer.connection.sock.shared_ciphers()) - if "cipher" in dir(peer.connection.sock): - print("Cipher:", peer.connection.sock.cipher()[0]) - if "version" in dir(peer.connection.sock): - print("TLS version:", peer.connection.sock.version()) - print("Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error)) - for i in range(5): - ping_delay = peer.ping() - print("Response time: %.3fs" % ping_delay) + print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt) time.sleep(1) peer.remove() - print("Reconnect test...") + print "Reconnect test..." peer = Peer(peer_ip, peer_port) for i in range(5): - ping_delay = peer.ping() - print("Response time: %.3fs" % ping_delay) + print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt) time.sleep(1) def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False): @@ -513,7 +432,6 @@ class Actions(object): global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) - file_server.start(check_connections=False) from Crypt import CryptConnection CryptConnection.manager.loadCerts() @@ -524,17 +442,16 @@ class Actions(object): if benchmark: for i in range(10): peer.getFile(site, filename), - print("Response time: %.3fs" % (time.time() - s)) - input("Check memory") + print "Response time: %.3fs" % (time.time() - s) + raw_input("Check memory") else: - print(peer.getFile(site, filename).read()) + print peer.getFile(site, filename).read() def peerCmd(self, peer_ip, peer_port, cmd, parameters): logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer() - file_server.start(check_connections=False) from Crypt import CryptConnection CryptConnection.manager.loadCerts() @@ -546,51 +463,7 @@ class Actions(object): parameters = json.loads(parameters.replace("'", '"')) else: parameters = {} - try: - res = peer.request(cmd, parameters) - print(json.dumps(res, indent=2, ensure_ascii=False)) - except Exception as err: - print("Unknown response (%s): %s" % (err, res)) - - def getConfig(self): - import json - print(json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False)) - - def test(self, test_name, *args, **kwargs): - import types - def funcToName(func_name): - test_name = func_name.replace("test", "") - return test_name[0].lower() + test_name[1:] - - test_names = [funcToName(name) for name in dir(self) if name.startswith("test") and name != "test"] - if not test_name: - # No test specificed, list tests - print("\nNo test specified, possible tests:") - for test_name in test_names: - func_name = "test" + test_name[0].upper() + test_name[1:] - func = getattr(self, func_name) - if func.__doc__: - print("- %s: %s" % (test_name, func.__doc__.strip())) - else: - print("- %s" % test_name) - return None - - # Run tests - func_name = "test" + test_name[0].upper() + test_name[1:] - if hasattr(self, func_name): - func = getattr(self, func_name) - print("- Running test: %s" % test_name, end="") - s = time.time() - ret = func(*args, **kwargs) - if type(ret) is types.GeneratorType: - for progress in ret: - print(progress, end="") - sys.stdout.flush() - print("\n* Test %s 
done in %.3fs" % (test_name, time.time() - s)) - else: - print("Unknown test: %r (choose from: %s)" % ( - test_name, test_names - )) + logging.info("Response: %s" % peer.request(cmd, parameters)) actions = Actions() diff --git a/src/util/Cached.py b/src/util/Cached.py deleted file mode 100644 index 72d60dbc..00000000 --- a/src/util/Cached.py +++ /dev/null @@ -1,68 +0,0 @@ -import time - - -class Cached(object): - def __init__(self, timeout): - self.cache_db = {} - self.timeout = timeout - - def __call__(self, func): - def wrapper(*args, **kwargs): - key = "%s %s" % (args, kwargs) - cached_value = None - cache_hit = False - if key in self.cache_db: - cache_hit = True - cached_value, time_cached_end = self.cache_db[key] - if time.time() > time_cached_end: - self.cleanupExpired() - cached_value = None - cache_hit = False - - if cache_hit: - return cached_value - else: - cached_value = func(*args, **kwargs) - time_cached_end = time.time() + self.timeout - self.cache_db[key] = (cached_value, time_cached_end) - return cached_value - - wrapper.emptyCache = self.emptyCache - - return wrapper - - def cleanupExpired(self): - for key in list(self.cache_db.keys()): - cached_value, time_cached_end = self.cache_db[key] - if time.time() > time_cached_end: - del(self.cache_db[key]) - - def emptyCache(self): - num = len(self.cache_db) - self.cache_db.clear() - return num - - -if __name__ == "__main__": - from gevent import monkey - monkey.patch_all() - - @Cached(timeout=2) - def calcAdd(a, b): - print("CalcAdd", a, b) - return a + b - - @Cached(timeout=1) - def calcMultiply(a, b): - print("calcMultiply", a, b) - return a * b - - for i in range(5): - print("---") - print("Emptied", calcAdd.emptyCache()) - assert calcAdd(1, 2) == 3 - print("Emptied", calcAdd.emptyCache()) - assert calcAdd(1, 2) == 3 - assert calcAdd(2, 3) == 5 - assert calcMultiply(2, 3) == 6 - time.sleep(1) diff --git a/src/util/Diff.py b/src/util/Diff.py index 53b82c5a..3c325da8 100644 --- a/src/util/Diff.py +++ b/src/util/Diff.py @@ -1,4 +1,4 @@ -import io +import cStringIO as StringIO import difflib @@ -31,10 +31,8 @@ def diff(old, new, limit=False): def patch(old_f, actions): - new_f = io.BytesIO() + new_f = StringIO.StringIO() for action, param in actions: - if type(action) is bytes: - action = action.decode() if action == "=": # Same lines new_f.write(old_f.read(param)) elif action == "-": # Delete lines @@ -42,9 +40,5 @@ def patch(old_f, actions): continue elif action == "+": # Add lines for add_line in param: - if type(add_line) is str: - add_line = add_line.encode() new_f.write(add_line) - else: - raise "Unknown action: %s" % action return new_f diff --git a/src/util/Electrum.py b/src/util/Electrum.py deleted file mode 100644 index 112151aa..00000000 --- a/src/util/Electrum.py +++ /dev/null @@ -1,39 +0,0 @@ -import hashlib -import struct - - -# Electrum, the heck?! 
- -def bchr(i): - return struct.pack("B", i) - -def encode(val, base, minlen=0): - base, minlen = int(base), int(minlen) - code_string = b"".join([bchr(x) for x in range(256)]) - result = b"" - while val > 0: - index = val % base - result = code_string[index:index + 1] + result - val //= base - return code_string[0:1] * max(minlen - len(result), 0) + result - -def insane_int(x): - x = int(x) - if x < 253: - return bchr(x) - elif x < 65536: - return bchr(253) + encode(x, 256, 2)[::-1] - elif x < 4294967296: - return bchr(254) + encode(x, 256, 4)[::-1] - else: - return bchr(255) + encode(x, 256, 8)[::-1] - - -def magic(message): - return b"\x18Bitcoin Signed Message:\n" + insane_int(len(message)) + message - -def format(message): - return hashlib.sha256(magic(message)).digest() - -def dbl_format(message): - return hashlib.sha256(format(message)).digest() diff --git a/src/util/Event.py b/src/util/Event.py index 9d642736..b9614795 100644 --- a/src/util/Event.py +++ b/src/util/Event.py @@ -28,19 +28,19 @@ class Event(list): if __name__ == "__main__": def testBenchmark(): def say(pre, text): - print("%s Say: %s" % (pre, text)) + print "%s Say: %s" % (pre, text) import time s = time.time() on_changed = Event() for i in range(1000): on_changed.once(lambda pre: say(pre, "once"), "once") - print("Created 1000 once in %.3fs" % (time.time() - s)) + print "Created 1000 once in %.3fs" % (time.time() - s) on_changed("#1") def testUsage(): def say(pre, text): - print("%s Say: %s" % (pre, text)) + print "%s Say: %s" % (pre, text) on_changed = Event() on_changed.once(lambda pre: say(pre, "once")) diff --git a/src/util/Flag.py b/src/util/Flag.py deleted file mode 100644 index 37cfdfba..00000000 --- a/src/util/Flag.py +++ /dev/null @@ -1,22 +0,0 @@ -from collections import defaultdict - - -class Flag(object): - def __init__(self): - self.valid_flags = set([ - "admin", # Only allowed to run sites with ADMIN permission - "async_run", # Action will be ran async with gevent.spawn - "no_multiuser" # Action disabled if Multiuser plugin running in open proxy mode - ]) - self.db = defaultdict(set) - - def __getattr__(self, key): - def func(f): - if key not in self.valid_flags: - raise Exception("Invalid flag: %s (valid: %s)" % (key, self.valid_flags)) - self.db[f.__name__].add(key) - return f - return func - - -flag = Flag() diff --git a/src/util/GreenletManager.py b/src/util/GreenletManager.py deleted file mode 100644 index e024233d..00000000 --- a/src/util/GreenletManager.py +++ /dev/null @@ -1,24 +0,0 @@ -import gevent -from Debug import Debug - - -class GreenletManager: - def __init__(self): - self.greenlets = set() - - def spawnLater(self, *args, **kwargs): - greenlet = gevent.spawn_later(*args, **kwargs) - greenlet.link(lambda greenlet: self.greenlets.remove(greenlet)) - self.greenlets.add(greenlet) - return greenlet - - def spawn(self, *args, **kwargs): - greenlet = gevent.spawn(*args, **kwargs) - greenlet.link(lambda greenlet: self.greenlets.remove(greenlet)) - self.greenlets.add(greenlet) - return greenlet - - def stopGreenlets(self, reason="Stopping all greenlets"): - num = len(self.greenlets) - gevent.killall(list(self.greenlets), Debug.createNotifyType(reason), block=False) - return num diff --git a/src/util/Msgpack.py b/src/util/Msgpack.py deleted file mode 100644 index 1033f92e..00000000 --- a/src/util/Msgpack.py +++ /dev/null @@ -1,101 +0,0 @@ -import os -import struct -import io - -import msgpack -import msgpack.fallback - - -def msgpackHeader(size): - if size <= 2 ** 8 - 1: - return b"\xc4" + 
struct.pack("B", size) - elif size <= 2 ** 16 - 1: - return b"\xc5" + struct.pack(">H", size) - elif size <= 2 ** 32 - 1: - return b"\xc6" + struct.pack(">I", size) - else: - raise Exception("huge binary string") - - -def stream(data, writer): - packer = msgpack.Packer(use_bin_type=True) - writer(packer.pack_map_header(len(data))) - for key, val in data.items(): - writer(packer.pack(key)) - if isinstance(val, io.IOBase): # File obj - max_size = os.fstat(val.fileno()).st_size - val.tell() - size = min(max_size, val.read_bytes) - bytes_left = size - writer(msgpackHeader(size)) - buff = 1024 * 64 - while 1: - writer(val.read(min(bytes_left, buff))) - bytes_left = bytes_left - buff - if bytes_left <= 0: - break - else: # Simple - writer(packer.pack(val)) - return size - - -class FilePart(object): - __slots__ = ("file", "read_bytes", "__class__") - - def __init__(self, *args, **kwargs): - self.file = open(*args, **kwargs) - self.__enter__ == self.file.__enter__ - - def __getattr__(self, attr): - return getattr(self.file, attr) - - def __enter__(self, *args, **kwargs): - return self.file.__enter__(*args, **kwargs) - - def __exit__(self, *args, **kwargs): - return self.file.__exit__(*args, **kwargs) - - -# Don't try to decode the value of these fields as utf8 -bin_value_keys = ("hashfield_raw", "peers", "peers_ipv6", "peers_onion", "body", "sites", "bin") - - -def objectDecoderHook(obj): - global bin_value_keys - back = {} - for key, val in obj: - if type(key) is bytes: - key = key.decode("utf8") - if key in bin_value_keys or type(val) is not bytes or len(key) >= 64: - back[key] = val - else: - back[key] = val.decode("utf8") - return back - - -def getUnpacker(fallback=False, decode=True): - if fallback: # Pure Python - unpacker = msgpack.fallback.Unpacker - else: - unpacker = msgpack.Unpacker - - extra_kwargs = {"max_buffer_size": 5 * 1024 * 1024} - if msgpack.version[0] >= 1: - extra_kwargs["strict_map_key"] = False - - if decode: # Workaround for backward compatibility: Try to decode bin to str - unpacker = unpacker(raw=True, object_pairs_hook=objectDecoderHook, **extra_kwargs) - else: - unpacker = unpacker(raw=False, **extra_kwargs) - - return unpacker - - -def pack(data, use_bin_type=True): - return msgpack.packb(data, use_bin_type=use_bin_type) - - -def unpack(data, decode=True): - unpacker = getUnpacker(decode=decode) - unpacker.feed(data) - return next(unpacker) - diff --git a/src/util/Noparallel.py b/src/util/Noparallel.py index 4a4a854d..0ae9da68 100644 --- a/src/util/Noparallel.py +++ b/src/util/Noparallel.py @@ -1,25 +1,18 @@ import gevent import time -from gevent.event import AsyncResult - -from . 
import ThreadPool -class Noparallel: # Only allow function running once in same time +class Noparallel(object): # Only allow function running once in same time def __init__(self, blocking=True, ignore_args=False, ignore_class=False, queue=False): self.threads = {} self.blocking = blocking # Blocking: Acts like normal function else thread returned - self.queue = queue # Execute again when blocking is done - self.queued = False - self.ignore_args = ignore_args # Block does not depend on function call arguments - self.ignore_class = ignore_class # Block does not depeds on class instance + self.queue = queue + self.ignore_args = ignore_args + self.ignore_class = ignore_class def __call__(self, func): def wrapper(*args, **kwargs): - if not ThreadPool.isMainThread(): - return ThreadPool.main_loop.call(wrapper, *args, **kwargs) - if self.ignore_class: key = func # Unique key only by function and class object elif self.ignore_args: @@ -27,18 +20,13 @@ class Noparallel: # Only allow function running once in same time else: key = (func, tuple(args), str(kwargs)) # Unique key for function including parameters if key in self.threads: # Thread already running (if using blocking mode) - if self.queue: - self.queued = True thread = self.threads[key] if self.blocking: - if self.queued: - res = thread.get() # Blocking until its finished - if key in self.threads: - return self.threads[key].get() # Queue finished since started running - self.queued = False - return wrapper(*args, **kwargs) # Run again after the end + thread.join() # Blocking until its finished + if self.queue: + return wrapper(*args, **kwargs) # Run again else: - return thread.get() # Return the value + return thread.value # Return the value else: # No blocking if thread.ready(): # Its finished, create a new @@ -48,24 +36,16 @@ class Noparallel: # Only allow function running once in same time else: # Still running return thread else: # Thread not running + thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread + thread.link(lambda thread: self.cleanup(key, thread)) + self.threads[key] = thread if self.blocking: # Wait for finish - asyncres = AsyncResult() - self.threads[key] = asyncres - try: - res = func(*args, **kwargs) - asyncres.set(res) - self.cleanup(key, asyncres) - return res - except Exception as err: - asyncres.set_exception(err) - self.cleanup(key, asyncres) - raise(err) + thread.join() + ret = thread.value + return ret else: # No blocking just return the thread - thread = gevent.spawn(func, *args, **kwargs) # Spawning new thread - thread.link(lambda thread: self.cleanup(key, thread)) - self.threads[key] = thread return thread - wrapper.__name__ = func.__name__ + wrapper.func_name = func.func_name return wrapper @@ -76,14 +56,12 @@ class Noparallel: # Only allow function running once in same time if __name__ == "__main__": - - class Test(): @Noparallel() def count(self, num=5): for i in range(num): - print(self, i) + print self, i time.sleep(1) return "%s return:%s" % (self, i) @@ -92,59 +70,59 @@ if __name__ == "__main__": @Noparallel(blocking=False) def count(self, num=5): for i in range(num): - print(self, i) + print self, i time.sleep(1) return "%s return:%s" % (self, i) def testBlocking(): test = Test() test2 = Test() - print("Counting...") - print("Creating class1/thread1") + print "Counting..." 
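For context on the Noparallel hunk above: the decorator keeps one greenlet per call key, and in blocking mode every concurrent caller waits on that single run and receives its return value. A condensed gevent sketch of just that behaviour (assumes gevent is installed; the key is simplified to the bare function object and exception propagation is omitted):

    import gevent

    class NoparallelSketch:
        def __init__(self):
            self.threads = {}  # call key -> running greenlet

        def __call__(self, func):
            def wrapper(*args, **kwargs):
                key = func  # simplified: the real code can also key on args/instance
                thread = self.threads.get(key)
                if thread is None:  # not running yet: spawn the single worker
                    thread = gevent.spawn(func, *args, **kwargs)
                    thread.link(lambda t: self.threads.pop(key, None))
                    self.threads[key] = thread
                thread.join()        # blocking mode: wait for the shared run
                return thread.value  # every concurrent caller gets this value
            return wrapper

    @NoparallelSketch()
    def work():
        gevent.sleep(0.1)
        return "result"

    a, b = gevent.spawn(work), gevent.spawn(work)
    gevent.joinall([a, b])
    assert a.value == b.value == "result"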
+ print "Creating class1/thread1" thread1 = gevent.spawn(test.count) - print("Creating class1/thread2 (ignored)") + print "Creating class1/thread2 (ignored)" thread2 = gevent.spawn(test.count) - print("Creating class2/thread3") + print "Creating class2/thread3" thread3 = gevent.spawn(test2.count) - print("Joining class1/thread1") + print "Joining class1/thread1" thread1.join() - print("Joining class1/thread2") + print "Joining class1/thread2" thread2.join() - print("Joining class2/thread3") + print "Joining class2/thread3" thread3.join() - print("Creating class1/thread4 (its finished, allowed again)") + print "Creating class1/thread4 (its finished, allowed again)" thread4 = gevent.spawn(test.count) - print("Joining thread4") + print "Joining thread4" thread4.join() - print(thread1.value, thread2.value, thread3.value, thread4.value) - print("Done.") + print thread1.value, thread2.value, thread3.value, thread4.value + print "Done." def testNoblocking(): test = TestNoblock() test2 = TestNoblock() - print("Creating class1/thread1") + print "Creating class1/thread1" thread1 = test.count() - print("Creating class1/thread2 (ignored)") + print "Creating class1/thread2 (ignored)" thread2 = test.count() - print("Creating class2/thread3") + print "Creating class2/thread3" thread3 = test2.count() - print("Joining class1/thread1") + print "Joining class1/thread1" thread1.join() - print("Joining class1/thread2") + print "Joining class1/thread2" thread2.join() - print("Joining class2/thread3") + print "Joining class2/thread3" thread3.join() - print("Creating class1/thread4 (its finished, allowed again)") + print "Creating class1/thread4 (its finished, allowed again)" thread4 = test.count() - print("Joining thread4") + print "Joining thread4" thread4.join() - print(thread1.value, thread2.value, thread3.value, thread4.value) - print("Done.") + print thread1.value, thread2.value, thread3.value, thread4.value + print "Done." def testBenchmark(): import time @@ -153,50 +131,21 @@ if __name__ == "__main__": import gc from greenlet import greenlet objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)] - print("Greenlets: %s" % len(objs)) + print "Greenlets: %s" % len(objs) printThreadNum() test = TestNoblock() s = time.time() for i in range(3): gevent.spawn(test.count, i + 1) - print("Created in %.3fs" % (time.time() - s)) + print "Created in %.3fs" % (time.time() - s) printThreadNum() time.sleep(5) - - def testException(): - import time - @Noparallel(blocking=True, queue=True) - def count(self, num=5): - s = time.time() - # raise Exception("err") - for i in range(num): - print(self, i) - time.sleep(1) - return "%s return:%s" % (s, i) - def caller(): - try: - print("Ret:", count(5)) - except Exception as err: - print("Raised:", repr(err)) - - gevent.joinall([ - gevent.spawn(caller), - gevent.spawn(caller), - gevent.spawn(caller), - gevent.spawn(caller) - ]) - - from gevent import monkey monkey.patch_all() - testException() - - """ testBenchmark() - print("Testing blocking mode...") + print "Testing blocking mode..." testBlocking() - print("Testing noblocking mode...") + print "Testing noblocking mode..." 
testNoblocking() - """ diff --git a/src/util/OpensslFindPatch.py b/src/util/OpensslFindPatch.py deleted file mode 100644 index 0f5d2dc6..00000000 --- a/src/util/OpensslFindPatch.py +++ /dev/null @@ -1,69 +0,0 @@ -import logging -import os -import sys -import ctypes.util - -from Config import config - -find_library_original = ctypes.util.find_library - - -def getOpensslPath(): - if config.openssl_lib_file: - return config.openssl_lib_file - - if sys.platform.startswith("win"): - lib_paths = [ - os.path.join(os.getcwd(), "tools/openssl/libeay32.dll"), # ZeroBundle Windows - os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1-x64.dll"), - os.path.join(os.path.dirname(sys.executable), "DLLs/libcrypto-1_1.dll") - ] - elif sys.platform == "cygwin": - lib_paths = ["/bin/cygcrypto-1.0.0.dll"] - else: - lib_paths = [ - "../runtime/lib/libcrypto.so.1.1", # ZeroBundle Linux - "../../Frameworks/libcrypto.1.1.dylib", # ZeroBundle macOS - "/opt/lib/libcrypto.so.1.0.0", # For optware and entware - "/usr/local/ssl/lib/libcrypto.so" - ] - - for lib_path in lib_paths: - if os.path.isfile(lib_path): - return lib_path - - if "ANDROID_APP_PATH" in os.environ: - try: - lib_dir = os.environ["ANDROID_APP_PATH"] + "/../../lib" - return [lib for lib in os.listdir(lib_dir) if "crypto" in lib][0] - except Exception as err: - logging.debug("OpenSSL lib not found in: %s (%s)" % (lib_dir, err)) - - if "LD_LIBRARY_PATH" in os.environ: - lib_dir_paths = os.environ["LD_LIBRARY_PATH"].split(":") - for path in lib_dir_paths: - try: - return [lib for lib in os.listdir(path) if "libcrypto.so" in lib][0] - except Exception as err: - logging.debug("OpenSSL lib not found in: %s (%s)" % (path, err)) - - lib_path = ( - find_library_original('ssl.so') or find_library_original('ssl') or - find_library_original('crypto') or find_library_original('libcrypto') or 'libeay32' - ) - - return lib_path - - -def patchCtypesOpensslFindLibrary(): - def findLibraryPatched(name): - if name in ("ssl", "crypto", "libeay32"): - lib_path = getOpensslPath() - return lib_path - else: - return find_library_original(name) - - ctypes.util.find_library = findLibraryPatched - - -patchCtypesOpensslFindLibrary() diff --git a/src/util/Platform.py b/src/util/Platform.py index 5bdde2f8..19477649 100644 --- a/src/util/Platform.py +++ b/src/util/Platform.py @@ -5,23 +5,11 @@ import logging def setMaxfilesopened(limit): try: if sys.platform == "win32": - import ctypes - dll = None - last_err = None - for dll_name in ["msvcr100", "msvcr110", "msvcr120"]: - try: - dll = getattr(ctypes.cdll, dll_name) - break - except OSError as err: - last_err = err - - if not dll: - raise last_err - - maxstdio = dll._getmaxstdio() + import win32file + maxstdio = win32file._getmaxstdio() if maxstdio < limit: - logging.debug("%s: Current maxstdio: %s, changing to %s..." % (dll, maxstdio, limit)) - dll._setmaxstdio(limit) + logging.debug("Current maxstdio: %s, changing to %s..." 
% (maxstdio, limit)) + win32file._setmaxstdio(limit) return True else: import resource @@ -31,6 +19,6 @@ def setMaxfilesopened(limit): resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard)) return True - except Exception as err: + except Exception, err: logging.error("Failed to modify max files open limit: %s" % err) return False diff --git a/src/util/Pooled.py b/src/util/Pooled.py index 9a4a7b63..b7751995 100644 --- a/src/util/Pooled.py +++ b/src/util/Pooled.py @@ -29,7 +29,7 @@ class Pooled(object): self.pooler_running = True gevent.spawn(self.pooler) return evt - wrapper.__name__ = func.__name__ + wrapper.func_name = func.func_name self.func = func return wrapper @@ -62,4 +62,4 @@ if __name__ == "__main__": s = time.time() gevent.joinall(threads) # Should take 10 second - print(time.time() - s) + print time.time() - s diff --git a/src/util/QueryJson.py b/src/util/QueryJson.py index d9921ff0..0eb56633 100644 --- a/src/util/QueryJson.py +++ b/src/util/QueryJson.py @@ -13,15 +13,12 @@ def queryFile(file_path, filter_path, filter_key=None, filter_val=None): if not data: return - if type(data) == list: - for row in data: - if filter_val: # Filter by value - if row[filter_key] == filter_val: - back.append(row) - else: + for row in data: + if filter_val: # Filter by value + if row[filter_key] == filter_val: back.append(row) - else: - back.append({"value": data}) + else: + back.append(row) return back @@ -64,4 +61,4 @@ def query(path_pattern, filter): if __name__ == "__main__": for row in list(query("../../data/12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH/data/users/*/data.json", "")): - print(row) + print row diff --git a/src/util/RateLimit.py b/src/util/RateLimit.py index 465859c2..2f1cf2d9 100644 --- a/src/util/RateLimit.py +++ b/src/util/RateLimit.py @@ -37,7 +37,7 @@ def delayLeft(event, allowed_again=10): def callQueue(event): func, args, kwargs, thread = queue_db[event] log.debug("Calling: %s" % event) - called(event) + del called_db[event] del queue_db[event] return func(*args, **kwargs) @@ -78,7 +78,8 @@ def call(event, allowed_again=10, func=None, *args, **kwargs): called(event, time_left) time.sleep(time_left) back = func(*args, **kwargs) - called(event) + if event in called_db: + del called_db[event] return back @@ -86,7 +87,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs): def rateLimitCleanup(): while 1: expired = time.time() - 60 * 2 # Cleanup if older than 2 minutes - for event in list(called_db.keys()): + for event in called_db.keys(): if called_db[event] < expired: del called_db[event] time.sleep(60 * 3) # Every 3 minutes @@ -99,30 +100,30 @@ if __name__ == "__main__": import random def publish(inner_path): - print("Publishing %s..." % inner_path) + print "Publishing %s..." % inner_path return 1 def cb(thread): - print("Value:", thread.value) + print "Value:", thread.value - print("Testing async spam requests rate limit to 1/sec...") + print "Testing async spam requests rate limit to 1/sec..." for i in range(3000): thread = callAsync("publish content.json", 1, publish, "content.json %s" % i) time.sleep(float(random.randint(1, 20)) / 100000) - print(thread.link(cb)) - print("Done") + print thread.link(cb) + print "Done" time.sleep(2) - print("Testing sync spam requests rate limit to 1/sec...") + print "Testing sync spam requests rate limit to 1/sec..." 
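A minimal sketch of the synchronous call() flow exercised by the RateLimit test above: each named event may fire once per allowed_again seconds, and a too-early caller first sleeps off the remaining penalty. The module-level dict below stands in for the real called_db bookkeeping and its cleanup thread:

    import time

    called_db = {}  # event name -> time when it may run again

    def rate_limited_call(event, allowed_again, func, *args, **kwargs):
        time_left = called_db.get(event, 0) - time.time()
        if time_left > 0:
            time.sleep(time_left)  # wait out the penalty window
        back = func(*args, **kwargs)
        called_db[event] = time.time() + allowed_again  # start a new window
        return back

    for i in range(3):
        rate_limited_call("publish data.json", 1, print, "call", i)  # ~1/sec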
for i in range(5): call("publish data.json", 1, publish, "data.json %s" % i) time.sleep(float(random.randint(1, 100)) / 100) - print("Done") + print "Done" - print("Testing cleanup") + print "Testing cleanup" thread = callAsync("publish content.json single", 1, publish, "content.json single") - print("Needs to cleanup:", called_db, queue_db) - print("Waiting 3min for cleanup process...") + print "Needs to cleanup:", called_db, queue_db + print "Waiting 3min for cleanup process..." time.sleep(60 * 3) - print("Cleaned up:", called_db, queue_db) + print "Cleaned up:", called_db, queue_db diff --git a/src/util/SafeRe.py b/src/util/SafeRe.py deleted file mode 100644 index 6018e2d3..00000000 --- a/src/util/SafeRe.py +++ /dev/null @@ -1,32 +0,0 @@ -import re - - -class UnsafePatternError(Exception): - pass - -cached_patterns = {} - - -def isSafePattern(pattern): - if len(pattern) > 255: - raise UnsafePatternError("Pattern too long: %s characters in %s" % (len(pattern), pattern)) - - unsafe_pattern_match = re.search(r"[^\.][\*\{\+]", pattern) # Always should be "." before "*{+" characters to avoid ReDoS - if unsafe_pattern_match: - raise UnsafePatternError("Potentially unsafe part of the pattern: %s in %s" % (unsafe_pattern_match.group(0), pattern)) - - repetitions = re.findall(r"\.[\*\{\+]", pattern) - if len(repetitions) >= 10: - raise UnsafePatternError("More than 10 repetitions of %s in %s" % (repetitions[0], pattern)) - - return True - - -def match(pattern, *args, **kwargs): - cached_pattern = cached_patterns.get(pattern) - if cached_pattern: - return cached_pattern.match(*args, **kwargs) - else: - if isSafePattern(pattern): - cached_patterns[pattern] = re.compile(pattern) - return cached_patterns[pattern].match(*args, **kwargs) diff --git a/src/util/SocksProxy.py b/src/util/SocksProxy.py index f831137b..4c357134 100644 --- a/src/util/SocksProxy.py +++ b/src/util/SocksProxy.py @@ -1,6 +1,6 @@ import socket -import socks +from lib.PySocks import socks from Config import config def create_connection(address, timeout=None, source_address=None): diff --git a/src/util/SslPatch.py b/src/util/SslPatch.py new file mode 100644 index 00000000..b5d3fc55 --- /dev/null +++ b/src/util/SslPatch.py @@ -0,0 +1,122 @@ +# https://journal.paul.querna.org/articles/2011/04/05/openssl-memory-use/ +# Disable SSL compression to save massive memory and cpu + +import logging +import os + +from Config import config + + +def openLibrary(): + import ctypes + import ctypes.util + try: + if sys.platform.startswith("win"): + dll_path = "src/lib/opensslVerify/libeay32.dll" + elif sys.platform == "cygwin": + dll_path = "/bin/cygcrypto-1.0.0.dll" + else: + dll_path = "/usr/local/ssl/lib/libcrypto.so" + ssl = ctypes.CDLL(dll_path, ctypes.RTLD_GLOBAL) + assert ssl + except: + dll_path = ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') + ssl = ctypes.CDLL(dll_path or 'libeay32', ctypes.RTLD_GLOBAL) + return ssl + + +def disableSSLCompression(): + import ctypes + import ctypes.util + try: + openssl = openLibrary() + openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p + except Exception, err: + logging.debug("Disable SSL compression failed: %s (normal on Windows)" % err) + return False + + openssl.sk_zero.argtypes = [ctypes.c_void_p] + openssl.sk_zero(openssl.SSL_COMP_get_compression_methods()) + logging.debug("Disabled SSL compression on %s" % openssl) + + +if config.disable_sslcompression: + try: + disableSSLCompression() + except Exception, err: + 
logging.debug("Error disabling SSL compression: %s" % err) + + +# https://github.com/gevent/gevent/issues/477 +# Re-add sslwrap to Python 2.7.9 + +__ssl__ = __import__('ssl') + +try: + _ssl = __ssl__._ssl +except AttributeError: + _ssl = __ssl__._ssl2 + +OldSSLSocket = __ssl__.SSLSocket + + +class NewSSLSocket(OldSSLSocket): + # Fix SSLSocket constructor + + def __init__( + self, sock, keyfile=None, certfile=None, server_side=False, + cert_reqs=__ssl__.CERT_REQUIRED, ssl_version=2, ca_certs=None, + do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None, + server_hostname=None, _context=None + ): + OldSSLSocket.__init__( + self, sock, keyfile=keyfile, certfile=certfile, + server_side=server_side, cert_reqs=cert_reqs, + ssl_version=ssl_version, ca_certs=ca_certs, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, ciphers=ciphers + ) + + +def new_sslwrap( + sock, server_side=False, keyfile=None, certfile=None, + cert_reqs=__ssl__.CERT_NONE, ssl_version=__ssl__.PROTOCOL_SSLv23, + ca_certs=None, ciphers=None +): + context = __ssl__.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + context.options |= ssl.OP_NO_SSLv3 + context.verify_mode = cert_reqs or __ssl__.CERT_NONE + if ca_certs: + context.load_verify_locations(ca_certs) + if certfile: + context.load_cert_chain(certfile, keyfile) + if ciphers: + context.set_ciphers(ciphers) + + caller_self = inspect.currentframe().f_back.f_locals['self'] + return context._wrap_socket(sock, server_side=server_side, ssl_sock=caller_self) + + +# Re-add sslwrap to Python 2.7.9+ +if not hasattr(_ssl, 'sslwrap'): + import inspect + _ssl.sslwrap = new_sslwrap + __ssl__.SSLSocket = NewSSLSocket + logging.debug("Missing SSLwrap, readded.") + + +# Add SSLContext to gevent.ssl (Ubuntu 15 fix) +try: + import gevent + if not hasattr(gevent.ssl, "SSLContext"): + gevent.ssl.SSLContext = __ssl__.SSLContext + logging.debug("Missing SSLContext, readded.") +except Exception, err: + pass + +# Redirect insecure SSLv2 and v3 +__ssl__.PROTOCOL_SSLv2 = __ssl__.PROTOCOL_SSLv3 = __ssl__.PROTOCOL_SSLv23 + + +logging.debug("Python SSL version: %s" % __ssl__.OPENSSL_VERSION) diff --git a/src/util/StreamingMsgpack.py b/src/util/StreamingMsgpack.py new file mode 100644 index 00000000..5ec868c4 --- /dev/null +++ b/src/util/StreamingMsgpack.py @@ -0,0 +1,40 @@ +import os +import struct + +import msgpack + + +def msgpackHeader(size): + if size <= 2 ** 8 - 1: + return b"\xc4" + struct.pack("B", size) + elif size <= 2 ** 16 - 1: + return b"\xc5" + struct.pack(">H", size) + elif size <= 2 ** 32 - 1: + return b"\xc6" + struct.pack(">I", size) + else: + raise Exception("huge binary string") + + +def stream(data, writer): + packer = msgpack.Packer() + writer(packer.pack_map_header(len(data))) + for key, val in data.iteritems(): + writer(packer.pack(key)) + if issubclass(type(val), file): # File obj + max_size = os.fstat(val.fileno()).st_size - val.tell() + size = min(max_size, val.read_bytes) + bytes_left = size + writer(msgpackHeader(size)) + buff = 1024 * 64 + while 1: + writer(val.read(min(bytes_left, buff))) + bytes_left = bytes_left - buff + if bytes_left <= 0: + break + else: # Simple + writer(packer.pack(val)) + return size + + +class FilePart(file): + pass diff --git a/src/util/ThreadPool.py b/src/util/ThreadPool.py deleted file mode 100644 index 5b31ce37..00000000 --- a/src/util/ThreadPool.py +++ /dev/null @@ -1,180 +0,0 @@ -import threading -import time -import queue - -import gevent -import gevent.monkey 
-import gevent.threadpool -import gevent._threading - - -class ThreadPool: - def __init__(self, max_size, name=None): - self.setMaxSize(max_size) - if name: - self.name = name - else: - self.name = "ThreadPool#%s" % id(self) - - def setMaxSize(self, max_size): - self.max_size = max_size - if max_size > 0: - self.pool = gevent.threadpool.ThreadPool(max_size) - else: - self.pool = None - - def wrap(self, func): - if self.pool is None: - return func - - def wrapper(*args, **kwargs): - if not isMainThread(): # Call directly if not in main thread - return func(*args, **kwargs) - res = self.apply(func, args, kwargs) - return res - - return wrapper - - def spawn(self, *args, **kwargs): - if not isMainThread() and not self.pool._semaphore.ready(): - # Avoid semaphore error when spawning from other thread and the pool is full - return main_loop.call(self.spawn, *args, **kwargs) - res = self.pool.spawn(*args, **kwargs) - return res - - def apply(self, func, args=(), kwargs={}): - t = self.spawn(func, *args, **kwargs) - if self.pool._apply_immediately(): - return main_loop.call(t.get) - else: - return t.get() - - def kill(self): - if self.pool is not None and self.pool.size > 0 and main_loop: - main_loop.call(lambda: gevent.spawn(self.pool.kill).join(timeout=1)) - - del self.pool - self.pool = None - - def __enter__(self): - return self - - def __exit__(self, *args): - self.kill() - - -lock_pool = gevent.threadpool.ThreadPool(50) -main_thread_id = threading.current_thread().ident - - -def isMainThread(): - return threading.current_thread().ident == main_thread_id - - -class Lock: - def __init__(self): - self.lock = gevent._threading.Lock() - self.locked = self.lock.locked - self.release = self.lock.release - self.time_lock = 0 - - def acquire(self, *args, **kwargs): - self.time_lock = time.time() - if self.locked() and isMainThread(): - # Start in new thread to avoid blocking gevent loop - return lock_pool.apply(self.lock.acquire, args, kwargs) - else: - return self.lock.acquire(*args, **kwargs) - - def __del__(self): - while self.locked(): - self.release() - - -class Event: - def __init__(self): - self.get_lock = Lock() - self.res = None - self.get_lock.acquire(False) - self.done = False - - def set(self, res): - if self.done: - raise Exception("Event already has value") - self.res = res - self.get_lock.release() - self.done = True - - def get(self): - if not self.done: - self.get_lock.acquire(True) - if self.get_lock.locked(): - self.get_lock.release() - back = self.res - return back - - def __del__(self): - self.res = None - while self.get_lock.locked(): - self.get_lock.release() - - -# Execute function calls in main loop from other threads -class MainLoopCaller(): - def __init__(self): - self.queue_call = queue.Queue() - - self.pool = gevent.threadpool.ThreadPool(1) - self.num_direct = 0 - self.running = True - - def caller(self, func, args, kwargs, event_done): - try: - res = func(*args, **kwargs) - event_done.set((True, res)) - except Exception as err: - event_done.set((False, err)) - - def start(self): - gevent.spawn(self.run) - time.sleep(0.001) - - def run(self): - while self.running: - if self.queue_call.qsize() == 0: # Get queue in new thread to avoid gevent blocking - func, args, kwargs, event_done = self.pool.apply(self.queue_call.get) - else: - func, args, kwargs, event_done = self.queue_call.get() - gevent.spawn(self.caller, func, args, kwargs, event_done) - del func, args, kwargs, event_done - self.running = False - - def call(self, func, *args, **kwargs): - if 
threading.current_thread().ident == main_thread_id: - return func(*args, **kwargs) - else: - event_done = Event() - self.queue_call.put((func, args, kwargs, event_done)) - success, res = event_done.get() - del event_done - self.queue_call.task_done() - if success: - return res - else: - raise res - - -def patchSleep(): # Fix memory leak by using real sleep in threads - real_sleep = gevent.monkey.get_original("time", "sleep") - - def patched_sleep(seconds): - if isMainThread(): - gevent.sleep(seconds) - else: - real_sleep(seconds) - time.sleep = patched_sleep - - -main_loop = MainLoopCaller() -main_loop.start() -patchSleep() diff --git a/src/util/UpnpPunch.py b/src/util/UpnpPunch.py index 18f4aaee..3694ba2b 100644 --- a/src/util/UpnpPunch.py +++ b/src/util/UpnpPunch.py @@ -1,13 +1,12 @@ import re -import urllib.request -import http.client +import urllib2 +import httplib import logging -from urllib.parse import urlparse +from urlparse import urlparse from xml.dom.minidom import parseString from xml.parsers.expat import ExpatError from gevent import socket -import gevent # Relevant UPnP spec: # http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf @@ -15,7 +14,6 @@ import gevent # General TODOs: # Handle 0 or >1 IGDs -logger = logging.getLogger("Upnp") class UpnpError(Exception): pass @@ -44,11 +42,11 @@ def perform_m_search(local_ip): 'MX: 2\r\n', 'ST: {0}\r\n'.format(search_target), '\r\n'] - ).encode("utf8") + ) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.bind((local_ip, 0)) + sock.bind((local_ip, 10000)) sock.sendto(ssdp_request, ('239.255.255.250', 1900)) if local_ip == "127.0.0.1": @@ -57,7 +55,7 @@ def perform_m_search(local_ip): sock.settimeout(5) try: - return sock.recv(2048).decode("utf8") + return sock.recv(2048) except socket.error: raise UpnpError("No reply from IGD using {} as IP".format(local_ip)) finally: @@ -85,7 +83,7 @@ def _retrieve_igd_profile(url): Retrieve the device's UPnP profile. """ try: - return urllib.request.urlopen(url.geturl(), timeout=5).read().decode('utf-8') + return urllib2.urlopen(url.geturl(), timeout=5).read().decode('utf-8') except socket.error: raise IGDError('IGD profile query timed out') @@ -128,55 +126,36 @@ def _parse_igd_profile(profile_xml): # add description def _get_local_ips(): - def method1(): - try: - # get local ip using UDP and a broadcast address - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) - # Not using because gevents getaddrinfo doesn't like that - # using port 1 as per hobbldygoop's comment about port 0 not working on osx: - # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928 - s.connect(('239.255.255.250', 1)) - return [s.getsockname()[0]] - except: - pass - - def method2(): - # Get ip by using UDP and a normal address (google dns ip) - try: - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.connect(('8.8.8.8', 0)) - return [s.getsockname()[0]] - except: - pass - - def method3(): - # Get ip by '' hostname . Not supported on all platforms. 
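The MainLoopCaller removed with src/util/ThreadPool.py above marshals function calls from worker threads back onto the main gevent loop through a queue. A simplified thread-only sketch of that hand-off; the real code uses a gevent-aware event and thread pool, and threading.Event is substituted here for brevity:

    import queue
    import threading

    main_thread_id = threading.current_thread().ident
    queue_call = queue.Queue()

    def call_on_main(func, *args, **kwargs):
        if threading.current_thread().ident == main_thread_id:
            return func(*args, **kwargs)  # already on the main thread
        done = threading.Event()
        box = {}
        def wrapped():
            try:
                box["res"] = (True, func(*args, **kwargs))
            except Exception as err:
                box["res"] = (False, err)
            done.set()
        queue_call.put(wrapped)   # hand the call to the main loop...
        done.wait()               # ...and block until it has run
        ok, res = box["res"]
        if ok:
            return res
        raise res

    def main_loop_pump():
        # The main thread drains the queue and executes the queued calls
        while True:
            queue_call.get()()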
- try: - return socket.gethostbyname_ex('')[2] - except: - pass - - threads = [ - gevent.spawn(method1), - gevent.spawn(method2), - gevent.spawn(method3) - ] - - gevent.joinall(threads, timeout=5) - local_ips = [] - for thread in threads: - if thread.value: - local_ips += thread.value + + # get local ip using UDP and a broadcast address + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + # Not using because gevents getaddrinfo doesn't like that + # using port 1 as per hobbldygoop's comment about port 0 not working on osx: + # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928 + s.connect(('239.255.255.250', 1)) + local_ips.append(s.getsockname()[0]) + + # Get ip by using UDP and a normal address (google dns ip) + try: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(('8.8.8.8', 0)) + local_ips.append(s.getsockname()[0]) + except: + pass + + # Get ip by '' hostname . Not supported on all platforms. + try: + local_ips += socket.gethostbyname_ex('')[2] + except: + pass # Delete duplicates local_ips = list(set(local_ips)) + local_ips = sorted(local_ips, key=lambda a: a.startswith("192"), reverse=True) # Probably we looking for an ip starting with 192 - - # Probably we looking for an ip starting with 192 - local_ips = sorted(local_ips, key=lambda a: a.startswith("192"), reverse=True) - + logging.debug("Found local ips: %s" % local_ips) return local_ips @@ -232,10 +211,10 @@ def _create_close_message(local_ip, def _parse_for_errors(soap_response): - logger.debug(soap_response.status) + logging.debug(soap_response.status) if soap_response.status >= 400: response_data = soap_response.read() - logger.debug(response_data) + logging.debug(response_data) try: err_dom = parseString(response_data) err_code = _get_first_child_data(err_dom.getElementsByTagName( @@ -265,9 +244,9 @@ def _send_soap_request(location, upnp_schema, control_path, soap_fn, ), 'Content-Type': 'text/xml' } - logger.debug("Sending UPnP request to {0}:{1}...".format( + logging.debug("Sending UPnP request to {0}:{1}...".format( location.hostname, location.port)) - conn = http.client.HTTPConnection(location.hostname, location.port) + conn = httplib.HTTPConnection(location.hostname, location.port) conn.request('POST', control_path, soap_message, headers) response = conn.getresponse() @@ -296,7 +275,7 @@ def _send_requests(messages, location, upnp_schema, control_path): def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=("TCP", "UDP")): - logger.debug("Trying using local ip: %s" % ip) + logging.debug("Trying using local ip: %s" % ip) idg_data = _collect_idg_data(ip) soap_messages = [ @@ -316,51 +295,30 @@ def _communicate_with_igd(port=15441, Manage sending a message generated by 'fn'. 
""" - local_ips = _get_local_ips() + # Retry every ip 'retries' times + local_ips = _get_local_ips() * retries success = False - def job(local_ip): - for retry in range(retries): - try: - _orchestrate_soap_request(local_ip, port, fn, desc, protos) - return True - except Exception as e: - logger.debug('Upnp request using "{0}" failed: {1}'.format(local_ip, e)) - gevent.sleep(1) - return False - - threads = [] - for local_ip in local_ips: - job_thread = gevent.spawn(job, local_ip) - threads.append(job_thread) - gevent.sleep(0.1) - if any([thread.value for thread in threads]): + try: + _orchestrate_soap_request(local_ip, port, fn, desc, protos) success = True break - - # Wait another 10sec for competition or any positive result - for _ in range(10): - all_done = all([thread.value is not None for thread in threads]) - any_succeed = any([thread.value for thread in threads]) - if all_done or any_succeed: - break - gevent.sleep(1) - - if any([thread.value for thread in threads]): - success = True + except (UpnpError, IGDError) as e: + logging.debug('Upnp request using "{0}" failed: {1}'.format( + local_ip, e)) + success = False + continue if not success: raise UpnpError( 'Failed to communicate with igd using port {0} on local machine after {1} tries.'.format( port, retries)) - return success - def ask_to_open_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): - logger.debug("Trying to open port %d." % port) - return _communicate_with_igd(port=port, + logging.debug("Trying to open port %d." % port) + _communicate_with_igd(port=port, desc=desc, retries=retries, fn=_create_open_message, @@ -368,28 +326,27 @@ def ask_to_open_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UD def ask_to_close_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): - logger.debug("Trying to close port %d." % port) + logging.debug("Trying to close port %d." % port) # retries=1 because multiple successes cause 500 response and failure - return _communicate_with_igd(port=port, + _communicate_with_igd(port=port, desc=desc, - retries=retries, + retries=1, fn=_create_close_message, protos=protos) + if __name__ == "__main__": from gevent import monkey - monkey.patch_all() - logging.basicConfig(level=logging.DEBUG) + monkey.patch_socket() + logging.getLogger().setLevel(logging.DEBUG) import time s = time.time() - print("Opening port...") - print("Success:", ask_to_open_port(15443, "ZeroNet", protos=["TCP"])) - print("Done in", time.time() - s) - - - print("Closing port...") - print("Success:", ask_to_close_port(15443, "ZeroNet", protos=["TCP"])) - print("Done in", time.time() - s) + print "Opening port..." + print ask_to_open_port(15443, "ZeroNet", retries=3, protos=["TCP"]) + print "Done in", time.time()-s + print "Closing port..." 
+ print ask_to_close_port(15443, "ZeroNet", retries=3, protos=["TCP"]) + print "Done in", time.time()-s diff --git a/src/util/__init__.py b/src/util/__init__.py index ab8a8b88..1c873327 100644 --- a/src/util/__init__.py +++ b/src/util/__init__.py @@ -1,4 +1,3 @@ -from .Cached import Cached -from .Event import Event -from .Noparallel import Noparallel -from .Pooled import Pooled +from Event import Event +from Noparallel import Noparallel +from Pooled import Pooled diff --git a/src/util/helper.py b/src/util/helper.py index 61455b08..ac3037af 100644 --- a/src/util/helper.py +++ b/src/util/helper.py @@ -7,14 +7,12 @@ import collections import time import logging import base64 -import json - import gevent from Config import config -def atomicWrite(dest, content, mode="wb"): +def atomicWrite(dest, content, mode="w"): try: with open(dest + "-tmpnew", mode) as f: f.write(content) @@ -22,63 +20,32 @@ def atomicWrite(dest, content, mode="wb"): os.fsync(f.fileno()) if os.path.isfile(dest + "-tmpold"): # Previous incomplete write os.rename(dest + "-tmpold", dest + "-tmpold-%s" % time.time()) - if os.path.isfile(dest): # Rename old file to -tmpold - os.rename(dest, dest + "-tmpold") + os.rename(dest, dest + "-tmpold") os.rename(dest + "-tmpnew", dest) - if os.path.isfile(dest + "-tmpold"): - os.unlink(dest + "-tmpold") # Remove old file + os.unlink(dest + "-tmpold") return True - except Exception as err: + except Exception, err: from Debug import Debug logging.error( - "File %s write failed: %s, (%s) reverting..." % - (dest, Debug.formatException(err), Debug.formatStack()) + "File %s write failed: %s, reverting..." % + (dest, Debug.formatException(err)) ) if os.path.isfile(dest + "-tmpold") and not os.path.isfile(dest): os.rename(dest + "-tmpold", dest) return False -def jsonDumps(data): - content = json.dumps(data, indent=1, sort_keys=True) - - # Make it a little more compact by removing unnecessary white space - def compact_dict(match): - if "\n" in match.group(0): - return match.group(0).replace(match.group(1), match.group(1).strip()) - else: - return match.group(0) - - content = re.sub(r"\{(\n[^,\[\{]{10,100000}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL) - - def compact_list(match): - if "\n" in match.group(0): - stripped_lines = re.sub("\n[ ]*", "", match.group(1)) - return match.group(0).replace(match.group(1), stripped_lines) - else: - return match.group(0) - - content = re.sub(r"\[([^\[\{]{2,100000}?)\][, ]{0,2}\n", compact_list, content, flags=re.DOTALL) - - # Remove end of line whitespace - content = re.sub(r"(?m)[ ]+$", "", content) - return content - - -def openLocked(path, mode="wb"): - try: - if os.name == "posix": - import fcntl - f = open(path, mode) - fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) - elif os.name == "nt": - import msvcrt - f = open(path, mode) - msvcrt.locking(f.fileno(), msvcrt.LK_NBLCK, 1) - else: - f = open(path, mode) - except (IOError, PermissionError, BlockingIOError) as err: - raise BlockingIOError("Unable to lock file: %s" % err) +def openLocked(path, mode="w"): + if os.name == "posix": + import fcntl + f = open(path, mode) + fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB) + elif os.name == "nt": + import msvcrt + f = open(path, mode) + msvcrt.locking(f.fileno(), msvcrt.LK_NBLCK, -1) + else: + f = open(path, mode) return f @@ -95,18 +62,11 @@ def getFreeSpace(): ctypes.c_wchar_p(config.data_dir), None, None, ctypes.pointer(free_space_pointer) ) free_space = free_space_pointer.value - except Exception as err: + except Exception, err: 
logging.error("GetFreeSpace error: %s" % err) return free_space -def sqlquote(value): - if type(value) is int: - return str(value) - else: - return "'%s'" % value.replace("'", "''") - - def shellquote(*args): if len(args) == 1: return '"%s"' % args[0].replace('"', "") @@ -115,33 +75,27 @@ def shellquote(*args): def packPeers(peers): - packed_peers = {"ipv4": [], "ipv6": [], "onion": []} + packed_peers = {"ip4": [], "onion": []} for peer in peers: try: - ip_type = getIpType(peer.ip) - if ip_type in packed_peers: - packed_peers[ip_type].append(peer.packMyAddress()) + if peer.ip.endswith(".onion"): + packed_peers["onion"].append(peer.packMyAddress()) + else: + packed_peers["ip4"].append(peer.packMyAddress()) except Exception: - logging.debug("Error packing peer address: %s" % peer) + logging.error("Error packing peer address: %s" % peer) return packed_peers -# ip, port to packed 6byte or 18byte format +# ip, port to packed 6byte format def packAddress(ip, port): - if ":" in ip: - return socket.inet_pton(socket.AF_INET6, ip) + struct.pack("H", port) - else: - return socket.inet_aton(ip) + struct.pack("H", port) + return socket.inet_aton(ip) + struct.pack("H", port) -# From 6byte or 18byte format to ip, port +# From 6byte format to ip, port def unpackAddress(packed): - if len(packed) == 18: - return socket.inet_ntop(socket.AF_INET6, packed[0:16]), struct.unpack_from("H", packed, 16)[0] - else: - if len(packed) != 6: - raise Exception("Invalid length ip4 packed address: %s" % len(packed)) - return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] + assert len(packed) == 6, "Invalid length ip4 packed address: %s" % len(packed) + return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] # onion, port to packed 12byte format @@ -152,14 +106,14 @@ def packOnionAddress(onion, port): # From 12byte format to ip, port def unpackOnionAddress(packed): - return base64.b32encode(packed[0:-2]).lower().decode() + ".onion", struct.unpack("H", packed[-2:])[0] + return base64.b32encode(packed[0:-2]).lower() + ".onion", struct.unpack("H", packed[-2:])[0] # Get dir from file -# Return: data/site/content.json -> data/site/ +# Return: data/site/content.json -> data/site def getDirname(path): if "/" in path: - return path[:path.rfind("/") + 1].lstrip("/") + return path[:path.rfind("/") + 1] else: return "" @@ -170,17 +124,6 @@ def getFilename(path): return path[path.rfind("/") + 1:] -def getFilesize(path): - try: - s = os.stat(path) - except Exception: - return None - if stat.S_ISREG(s.st_mode): # Test if it's file - return s.st_size - else: - return None - - # Convert hash to hashid for hashfield def toHashId(hash): return int(hash[0:4], 16) @@ -190,7 +133,7 @@ def toHashId(hash): def mergeDicts(dicts): back = collections.defaultdict(set) for d in dicts: - for key, val in d.items(): + for key, val in d.iteritems(): back[key].update(val) return dict(back) @@ -198,16 +141,16 @@ def mergeDicts(dicts): # Request https url using gevent SSL error workaround def httpRequest(url, as_file=False): if url.startswith("http://"): - import urllib.request - response = urllib.request.urlopen(url) + import urllib + response = urllib.urlopen(url) else: # Hack to avoid Python gevent ssl errors import socket - import http.client + import httplib import ssl host, request = re.match("https://(.*?)(/.*?)$", url).groups() - conn = http.client.HTTPSConnection(host) + conn = httplib.HTTPSConnection(host) sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address) conn.sock = 
@@ -170,17 +124,6 @@
 def getFilename(path):
     return path[path.rfind("/") + 1:]


-def getFilesize(path):
-    try:
-        s = os.stat(path)
-    except Exception:
-        return None
-    if stat.S_ISREG(s.st_mode):  # Test if it's file
-        return s.st_size
-    else:
-        return None
-
-
 # Convert hash to hashid for hashfield
 def toHashId(hash):
     return int(hash[0:4], 16)
@@ -190,7 +133,7 @@ def toHashId(hash):
 def mergeDicts(dicts):
     back = collections.defaultdict(set)
     for d in dicts:
-        for key, val in d.items():
+        for key, val in d.iteritems():
             back[key].update(val)
     return dict(back)


@@ -198,16 +141,16 @@
 # Request https url using gevent SSL error workaround
 def httpRequest(url, as_file=False):
     if url.startswith("http://"):
-        import urllib.request
-        response = urllib.request.urlopen(url)
+        import urllib
+        response = urllib.urlopen(url)
     else:  # Hack to avoid Python gevent ssl errors
         import socket
-        import http.client
+        import httplib
         import ssl

         host, request = re.match("https://(.*?)(/.*?)$", url).groups()
-        conn = http.client.HTTPSConnection(host)
+        conn = httplib.HTTPSConnection(host)
         sock = socket.create_connection((conn.host, conn.port), conn.timeout, conn.source_address)
         conn.sock = ssl.wrap_socket(sock, conn.key_file, conn.cert_file)
         conn.request("GET", request)
@@ -217,8 +160,8 @@ def httpRequest(url, as_file=False):
         response = httpRequest(response.getheader('Location'))

     if as_file:
-        import io
-        data = io.BytesIO()
+        import cStringIO as StringIO
+        data = StringIO.StringIO()
         while True:
             buff = response.read(1024 * 16)
             if not buff:
@@ -235,7 +178,7 @@ def timerCaller(secs, func, *args, **kwargs):


 def timer(secs, func, *args, **kwargs):
-    return gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs)
+    gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs)


 def create_connection(address, timeout=None, source_address=None):
@@ -245,112 +188,9 @@ def create_connection(address, timeout=None, source_address=None):
         sock = socket.create_connection_original(address, timeout, socket.bind_addr)
     return sock

-
 def socketBindMonkeyPatch(bind_ip, bind_port):
     import socket
     logging.info("Monkey patching socket to bind to: %s:%s" % (bind_ip, bind_port))
     socket.bind_addr = (bind_ip, int(bind_port))
     socket.create_connection_original = socket.create_connection
     socket.create_connection = create_connection
-
-
-def limitedGzipFile(*args, **kwargs):
-    import gzip
-
-    class LimitedGzipFile(gzip.GzipFile):
-        def read(self, size=-1):
-            return super(LimitedGzipFile, self).read(1024 * 1024 * 25)
-    return LimitedGzipFile(*args, **kwargs)
-
-
-def avg(items):
-    if len(items) > 0:
-        return sum(items) / len(items)
-    else:
-        return 0
-
-
-def isIp(ip):
-    if ":" in ip:  # IPv6
-        try:
-            socket.inet_pton(socket.AF_INET6, ip)
-            return True
-        except Exception:
-            return False
-
-    else:  # IPv4
-        try:
-            socket.inet_aton(ip)
-            return True
-        except Exception:
-            return False
-
-
-local_ip_pattern = re.compile(r"^127\.|192\.168\.|10\.|172\.1[6-9]\.|172\.2[0-9]\.|172\.3[0-1]\.|169\.254\.|::1$|fe80")
-def isPrivateIp(ip):
-    return local_ip_pattern.match(ip)
-
-
-def getIpType(ip):
-    if ip.endswith(".onion"):
-        return "onion"
-    elif ":" in ip:
-        return "ipv6"
-    elif re.match(r"[0-9\.]+$", ip):
-        return "ipv4"
-    else:
-        return "unknown"
-
-
-def createSocket(ip, sock_type=socket.SOCK_STREAM):
-    ip_type = getIpType(ip)
-    if ip_type == "ipv6":
-        return socket.socket(socket.AF_INET6, sock_type)
-    else:
-        return socket.socket(socket.AF_INET, sock_type)
-
-
-def getInterfaceIps(ip_type="ipv4"):
-    res = []
-    if ip_type == "ipv6":
-        test_ips = ["ff0e::c", "2606:4700:4700::1111"]
-    else:
-        test_ips = ['239.255.255.250', "8.8.8.8"]
-
-    for test_ip in test_ips:
-        try:
-            s = createSocket(test_ip, sock_type=socket.SOCK_DGRAM)
-            s.connect((test_ip, 1))
-            res.append(s.getsockname()[0])
-        except Exception:
-            pass
-
-    try:
-        res += [ip[4][0] for ip in socket.getaddrinfo(socket.gethostname(), 1)]
-    except Exception:
-        pass
-
-    res = [re.sub("%.*", "", ip) for ip in res if getIpType(ip) == ip_type and isIp(ip)]
-    return list(set(res))
-
-
-def cmp(a, b):
-    return (a > b) - (a < b)
-
-
-def encodeResponse(func):  # Encode returned data from utf8 to bytes
-    def wrapper(*args, **kwargs):
-        back = func(*args, **kwargs)
-        if "__next__" in dir(back):
-            for part in back:
-                if type(part) == bytes:
-                    yield part
-                else:
-                    yield part.encode()
-        else:
-            if type(back) == bytes:
-                yield back
-            else:
-                yield back.encode()
-
-    return wrapper
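Note: socketBindMonkeyPatch survives the cleanup above. It pins outgoing traffic to one local address by stashing (bind_ip, bind_port) on the socket module and swapping socket.create_connection for a wrapper that passes it as source_address. A self-contained sketch of that monkey-patch pattern (simplified; the real create_connection above also special-cases some destinations):

import socket

_create_connection_original = socket.create_connection

def _bound_create_connection(address, timeout=None, source_address=None):
    # Fall back to the configured bind address when the caller supplied none.
    return _create_connection_original(address, timeout,
                                       source_address or socket.bind_addr)

def bind_outgoing_to(bind_ip, bind_port=0):
    socket.bind_addr = (bind_ip, int(bind_port))   # stashed on the module, as above
    socket.create_connection = _bound_create_connection

Anything that goes through socket.create_connection (httplib / http.client, urllib and friends) then leaves from the chosen interface without being modified itself.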
["--open_browser", "default_browser"] + sys.argv[1:] - zeronet.start() + sys.argv = [sys.argv[0]]+["--open_browser", "default_browser"]+sys.argv[1:] + zeronet.main() if __name__ == '__main__': main() diff --git a/tools/coffee/coffee-script.js b/tools/coffee/coffee-script.js index 7fce39a6..63cc086b 100644 --- a/tools/coffee/coffee-script.js +++ b/tools/coffee/coffee-script.js @@ -1,405 +1,399 @@ /** - * CoffeeScript Compiler v1.12.6 + * CoffeeScript Compiler v1.12.4 * http://coffeescript.org * * Copyright 2011, Jeremy Ashkenas * Released under the MIT License */ -var $jscomp=$jscomp||{};$jscomp.scope={};$jscomp.checkStringArgs=function(u,xa,va){if(null==u)throw new TypeError("The 'this' value for String.prototype."+va+" must not be null or undefined");if(xa instanceof RegExp)throw new TypeError("First argument to String.prototype."+va+" must not be a regular expression");return u+""}; -$jscomp.defineProperty="function"==typeof Object.defineProperties?Object.defineProperty:function(u,xa,va){if(va.get||va.set)throw new TypeError("ES3 does not support getters and setters.");u!=Array.prototype&&u!=Object.prototype&&(u[xa]=va.value)};$jscomp.getGlobal=function(u){return"undefined"!=typeof window&&window===u?u:"undefined"!=typeof global&&null!=global?global:u};$jscomp.global=$jscomp.getGlobal(this); -$jscomp.polyfill=function(u,xa,va,f){if(xa){va=$jscomp.global;u=u.split(".");for(f=0;fu||1342177279>>=1)va+=va;return f}},"es6-impl","es3");$jscomp.findInternal=function(u,xa,va){u instanceof String&&(u=String(u));for(var f=u.length,qa=0;qau||1342177279>>=1)qa+=qa;return g}},"es6-impl","es3");$jscomp.findInternal=function(u,ya,qa){u instanceof String&&(u=String(u));for(var g=u.length,ua=0;ua>>=1,a+=a;return g};f.compact=function(a){var g,b;var n=[];var y=0;for(b=a.length;yc)return m.call(this,L,a-1);(w=L[0],0<=y.call(g,w))?c+=1:(l=L[0],0<=y.call(h,l))&&--c;a+=1}return a-1};l.prototype.removeLeadingNewlines=function(){var a,b;var m=this.tokens;var k=a=0;for(b=m.length;ag;f=0<=g?++b:--b){for(;"HERECOMMENT"===this.tag(l+f+c);)c+=2;if(null!=h[f]&&("string"===typeof h[f]&&(h[f]=[h[f]]),k=this.tag(l+f+c),0>y.call(h[f],k)))return-1}return l+f+c-1};l.prototype.looksObjectish=function(a){if(-1y.call(b,w))&&((f=this.tag(a),0>y.call(g,f))||this.tokens[a].generated)&&(n=this.tag(a),0>y.call(R,n)));)(k=this.tag(a),0<=y.call(h,k))&&c.push(this.tag(a)),(l=this.tag(a),0<=y.call(g, -l))&&c.length&&c.pop(),--a;return x=this.tag(a),0<=y.call(b,x)};l.prototype.addImplicitBracesAndParens=function(){var a=[];var l=null;return this.scanTokens(function(c,k,f){var m,w,n,r;var G=c[0];var K=(m=0y.call(h,a):return l[1];case "@"!==this.tag(k-2):return k-2;default:return k-1}}.call(this);"HERECOMMENT"===this.tag(q-2);)q-=2;this.insideForDeclaration="FOR"===u;m=0===q||(r=this.tag(q-1),0<=y.call(R,r))||f[q-1].newLine;if(B()&&(T=B(),r=T[0],v=T[1],("{"===r||"INDENT"===r&&"{"===this.tag(v-1))&&(m||","===this.tag(q-1)||"{"===this.tag(q-1))))return A(1);M(q,!!m);return A(2)}if(0<=y.call(R,G))for(M=a.length-1;0<=M;M+=-1)r=a[M],E(r)&&(r[2].sameLine= -!1);M="OUTDENT"===K||m.newLine;if(0<=y.call(x,G)||0<=y.call(z,G)&&M)for(;O();)if(M=B(),r=M[0],v=M[1],m=M[2],M=m.sameLine,m=m.startsLine,C()&&","!==K)S();else if(T()&&!this.insideForDeclaration&&M&&"TERMINATOR"!==G&&":"!==K)q();else if(!T()||"TERMINATOR"!==G||","===K||m&&this.looksObjectish(k+1))break;else{if("HERECOMMENT"===u)return 
A(1);q()}if(!(","!==G||this.looksObjectish(k+1)||!T()||this.insideForDeclaration||"TERMINATOR"===u&&this.looksObjectish(k+2)))for(u="OUTDENT"===u?1:0;T();)q(k+u);return A(1)})}; -l.prototype.addLocationDataToGeneratedTokens=function(){return this.scanTokens(function(a,b,g){var c,l;if(a[2]||!a.generated&&!a.explicit)return 1;if("{"===a[0]&&(c=null!=(l=g[b+1])?l[2]:void 0)){var m=c.first_line;c=c.first_column}else(c=null!=(m=g[b-1])?m[2]:void 0)?(m=c.last_line,c=c.last_column):m=c=0;a[2]={first_line:m,first_column:c,last_line:m,last_column:c};return 1})};l.prototype.fixOutdentLocationData=function(){return this.scanTokens(function(a,b,g){if(!("OUTDENT"===a[0]||a.generated&& -"CALL_END"===a[0]||a.generated&&"}"===a[0]))return 1;b=g[b-1][2];a[2]={first_line:b.last_line,first_column:b.last_column,last_line:b.last_line,last_column:b.last_column};return 1})};l.prototype.normalizeLines=function(){var b,g;var l=b=g=null;var k=function(a,b){var c,g,k,f;return";"!==a[1]&&(c=a[0],0<=y.call(O,c))&&!("TERMINATOR"===a[0]&&(g=this.tag(b+1),0<=y.call(H,g)))&&!("ELSE"===a[0]&&"THEN"!==l)&&!!("CATCH"!==(k=a[0])&&"FINALLY"!==k||"-\x3e"!==l&&"\x3d\x3e"!==l)||(f=a[0],0<=y.call(z,f))&&(this.tokens[b- -1].newLine||"OUTDENT"===this.tokens[b-1][0])};var f=function(a,b){return this.tokens.splice(","===this.tag(b-1)?b-1:b,0,g)};return this.scanTokens(function(c,m,h){var w,n,r;c=c[0];if("TERMINATOR"===c){if("ELSE"===this.tag(m+1)&&"OUTDENT"!==this.tag(m-1))return h.splice.apply(h,[m,1].concat(a.call(this.indentation()))),1;if(w=this.tag(m+1),0<=y.call(H,w))return h.splice(m,1),0}if("CATCH"===c)for(w=n=1;2>=n;w=++n)if("OUTDENT"===(r=this.tag(m+w))||"TERMINATOR"===r||"FINALLY"===r)return h.splice.apply(h, -[m+w,0].concat(a.call(this.indentation()))),2+w;0<=y.call(J,c)&&"INDENT"!==this.tag(m+1)&&("ELSE"!==c||"IF"!==this.tag(m+1))&&(l=c,r=this.indentation(h[m]),b=r[0],g=r[1],"THEN"===l&&(b.fromThen=!0),h.splice(m+1,0,b),this.detectEnd(m+2,k,f),"THEN"===c&&h.splice(m,1));return 1})};l.prototype.tagPostfixConditionals=function(){var a=null;var b=function(a,b){a=a[0];b=this.tokens[b-1][0];return"TERMINATOR"===a||"INDENT"===a&&0>y.call(J,b)};var g=function(b,c){if("INDENT"!==b[0]||b.generated&&!b.fromThen)return a[0]= -"POST_"+a[0]};return this.scanTokens(function(c,l){if("IF"!==c[0])return 1;a=c;this.detectEnd(l+1,b,g);return 1})};l.prototype.indentation=function(a){var b=["INDENT",2];var c=["OUTDENT",2];a?(b.generated=c.generated=!0,b.origin=c.origin=a):b.explicit=c.explicit=!0;return[b,c]};l.prototype.generate=b;l.prototype.tag=function(a){var b;return null!=(b=this.tokens[a])?b[0]:void 0};return l}();var ya=[["(",")"],["[","]"],["{","}"],["INDENT","OUTDENT"],["CALL_START","CALL_END"],["PARAM_START","PARAM_END"], -["INDEX_START","INDEX_END"],["STRING_START","STRING_END"],["REGEX_START","REGEX_END"]];f.INVERSES=u={};var g=[];var h=[];var r=0;for(q=ya.length;rthis.indent){if(c||"RETURN"===this.tag())return this.indebt=b-this.indent,this.suppressNewlines(),a.length;if(!this.tokens.length)return this.baseIndent= -this.indent=b,a.length;c=b-this.indent+this.outdebt;this.token("INDENT",c,a.length-b,b);this.indents.push(c);this.ends.push({tag:"OUTDENT"});this.outdebt=this.indebt=0;this.indent=b}else bl&&(m=this.token("+","+"),m[2]={first_line:w[2].first_line,first_column:w[2].first_column,last_line:w[2].first_line,last_column:w[2].first_column});(f=this.tokens).push.apply(f,r)}if(k)return 
a=a[a.length-1],k.origin=["STRING",null,{first_line:k[2].first_line,first_column:k[2].first_column,last_line:a[2].last_line,last_column:a[2].last_column}],k=this.token("STRING_END",")"),k[2]={first_line:a[2].last_line,first_column:a[2].last_column, -last_line:a[2].last_line,last_column:a[2].last_column}};a.prototype.pair=function(a){var b=this.ends;b=b[b.length-1];return a!==(b=null!=b?b.tag:void 0)?("OUTDENT"!==b&&this.error("unmatched "+a),b=this.indents,b=b[b.length-1],this.outdentToken(b,!0),this.pair(a)):this.ends.pop()};a.prototype.getLineAndColumnFromChunk=function(a){if(0===a)return[this.chunkLine,this.chunkColumn];var b=a>=this.chunk.length?this.chunk:this.chunk.slice(0,+(a-1)+1||9E9);a=g(b,"\n");var c=this.chunkColumn;0a)return b(a);var c=Math.floor((a-65536)/1024)+55296;a=(a-65536)%1024+56320;return""+b(c)+b(a)};a.prototype.replaceUnicodeCodePointEscapes= -function(a,b){return a.replace(sa,function(a){return function(c,g,k,h){if(g)return g;c=parseInt(k,16);1114111q.call(y.call(I).concat(y.call(F)),a):return"keyword '"+b+"' can't be assigned";case 0>q.call(O, -a):return"'"+b+"' can't be assigned";case 0>q.call(J,a):return"reserved word '"+b+"' can't be assigned";default:return!1}};f.isUnassignable=B;var H=function(a){var b;return"IDENTIFIER"===a[0]?("from"===a[1]&&(a[1][0]="IDENTIFIER",!0),!0):"FOR"===a[0]?!1:"{"===(b=a[1])||"["===b||","===b||":"===b?!1:!0};var I="true false null this new delete typeof in instanceof return throw break continue debugger yield if else switch for while do try catch finally class extends super import export default".split(" "); -var F="undefined Infinity NaN then unless until loop of by when".split(" ");var Q={and:"\x26\x26",or:"||",is:"\x3d\x3d",isnt:"!\x3d",not:"!",yes:"true",no:"false",on:"true",off:"false"};var x=function(){var a=[];for(qa in Q)a.push(qa);return a}();F=F.concat(x);var J="case function var void with const let enum native implements interface package private protected public static".split(" ");var O=["arguments","eval"];f.JS_FORBIDDEN=I.concat(J).concat(O);var R=65279;var z=/^(?!\d)((?:(?!\s)[$\w\x7f-\uffff])+)([^\n\S]*:(?!:))?/; -var l=/^0b[01]+|^0o[0-7]+|^0x[\da-f]+|^\d*\.?\d+(?:e[+-]?\d+)?/i;var c=/^(?:[-=]>|[-+*\/%<>&|^!?=]=|>>>=?|([-+:])\1|([&|<>*\/%])\2=?|\?(\.|::)|\.{2,3})/;var w=/^[^\n\S]+/;var m=/^###([^#][\s\S]*?)(?:###[^\n\S]*|###$)|^(?:\s*#(?!##[^#]).*)+/;var k=/^[-=]>/;var K=/^(?:\n[^\n\S]*)+/;var P=/^`(?!``)((?:[^`\\]|\\[\s\S])*)`/;var L=/^```((?:[^`\\]|\\[\s\S]|`(?!``))*)```/;var V=/^(?:'''|"""|'|")/;var X=/^(?:[^\\']|\\[\s\S])*/;var G=/^(?:[^\\"#]|\\[\s\S]|\#(?!\{))*/;var aa=/^(?:[^\\']|\\[\s\S]|'(?!''))*/; -var U=/^(?:[^\\"#]|\\[\s\S]|"(?!"")|\#(?!\{))*/;var W=/((?:\\\\)+)|\\[^\S\n]*\n\s*/g;var D=/\s*\n\s*/g;var A=/\n+([^\n\S]*)(?=\S)/g;var fc=/^\/(?!\/)((?:[^[\/\n\\]|\\[^\n]|\[(?:\\[^\n]|[^\]\n\\])*\])*)(\/)?/;var E=/^\w*/;var ba=/^(?!.*(.).*\1)[imguy]*$/;var ca=/^(?:[^\\\/#]|\\[\s\S]|\/(?!\/\/)|\#(?!\{))*/;var C=/((?:\\\\)+)|\\(\s)|\s+(?:#.*)?/g;var T=/^(\/|\/{3}\s*)(\*)/;var v=/^\/=?\s/;var Y=/\*\//;var S=/^\s*(?:,|\??\.(?![.\d])|::)/;var M=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7]|[1-7])|(x(?![\da-fA-F]{2}).{0,2})|(u\{(?![\da-fA-F]{1,}\})[^}]*\}?)|(u(?!\{|[\da-fA-F]{4}).{0,4}))/; -var va=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7])|(x(?![\da-fA-F]{2}).{0,2})|(u\{(?![\da-fA-F]{1,}\})[^}]*\}?)|(u(?!\{|[\da-fA-F]{4}).{0,4}))/;var sa=/(\\\\)|\\u\{([\da-fA-F]+)\}/g;var za=/^[^\n\S]*\n/;var ma=/\n[^\n\S]*$/;var Z=/\s+$/;var fa="-\x3d +\x3d /\x3d *\x3d %\x3d ||\x3d \x26\x26\x3d ?\x3d \x3c\x3c\x3d \x3e\x3e\x3d \x3e\x3e\x3e\x3d 
\x26\x3d ^\x3d |\x3d **\x3d //\x3d %%\x3d".split(" ");var ia=["NEW","TYPEOF","DELETE","DO"];var ga=["!","~"];var ja=["\x3c\x3c","\x3e\x3e","\x3e\x3e\x3e"];var la="\x3d\x3d !\x3d \x3c \x3e \x3c\x3d \x3e\x3d".split(" "); -var oa=["*","/","%","//","%%"];var pa=["IN","OF","INSTANCEOF"];var ha="IDENTIFIER PROPERTY ) ] ? @ THIS SUPER".split(" ");var ka=ha.concat("NUMBER INFINITY NAN STRING STRING_END REGEX REGEX_END BOOL NULL UNDEFINED } ::".split(" "));var na=ka.concat(["++","--"]);var ra=["INDENT","OUTDENT","TERMINATOR"];var da=[")","}","]"]}).call(this);return f}();u["./parser"]=function(){var f={},qa={exports:f},q=function(){function f(){this.yy={}}var a=function(a,p,t,d){t=t||{};for(d=a.length;d--;t[a[d]]=p);return t}, -b=[1,22],u=[1,25],g=[1,83],h=[1,79],r=[1,84],n=[1,85],B=[1,81],H=[1,82],I=[1,56],F=[1,58],Q=[1,59],x=[1,60],J=[1,61],O=[1,62],R=[1,49],z=[1,50],l=[1,32],c=[1,68],w=[1,69],m=[1,78],k=[1,47],K=[1,51],P=[1,52],L=[1,67],V=[1,65],X=[1,66],G=[1,64],aa=[1,42],U=[1,48],W=[1,63],D=[1,73],A=[1,74],q=[1,75],E=[1,76],ba=[1,46],ca=[1,72],C=[1,34],T=[1,35],v=[1,36],Y=[1,37],S=[1,38],M=[1,39],qa=[1,86],sa=[1,6,32,42,131],za=[1,101],ma=[1,89],Z=[1,88],fa=[1,87],ia=[1,90],ga=[1,91],ja=[1,92],la=[1,93],oa=[1,94],pa= -[1,95],ha=[1,96],ka=[1,97],na=[1,98],ra=[1,99],da=[1,100],va=[1,104],N=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],xa=[2,166],ta=[1,110],Na=[1,111],Fa=[1,112],Ga=[1,113],Ca=[1,115],Pa=[1,116],Ia=[1,109],Ea=[1,6,32,42,131,133,135,139,156],Va=[2,27],ea=[1,123],Ya=[1,121],Ba=[1,6,31,32,40,41,42,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172, -173,174],Ha=[2,94],t=[1,6,31,32,42,46,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],p=[2,73],d=[1,128],wa=[1,133],e=[1,134],Da=[1,136],Ta=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ua=[2,91],Eb=[1,6,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168, -169,170,171,172,173,174],Za=[2,63],Fb=[1,166],$a=[1,178],Ua=[1,180],Gb=[1,175],Oa=[1,182],sb=[1,184],La=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],Hb=[2,110],Ib=[1,6,31,32,40,41,42,58,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Jb=[1,6,31,32,40,41,42,46,58,65,70,73,82,83,84, -85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Kb=[40,41,114],Lb=[1,241],tb=[1,240],Ma=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156],Ja=[2,71],Mb=[1,250],Sa=[6,31,32,65,70],fb=[6,31,32,55,65,70,73],ab=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,164,166,167,168,169,170,171,172,173,174],Nb=[40,41,82,83,84,85,87,90,113,114],gb=[1,269],bb=[2,62],hb=[1,279],Wa=[1,281],ub=[1, 
-286],cb=[1,288],Ob=[2,187],vb=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ib=[1,297],Qa=[6,31,32,70,115,120],Pb=[1,6,31,32,40,41,42,55,58,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],Qb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,140,156],Xa=[1,6,31,32, -42,65,70,73,89,94,115,120,122,131,134,140,156],jb=[146,147,148],kb=[70,146,147,148],lb=[6,31,94],Rb=[1,311],Aa=[6,31,32,70,94],Sb=[6,31,32,58,70,94],wb=[6,31,32,55,58,70,94],Tb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,166,167,168,169,170,171,172,173,174],Ub=[12,28,34,38,40,41,44,45,48,49,50,51,52,53,61,62,63,67,68,89,92,95,97,105,112,117,118,119,125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Vb=[2,176],Ra=[6,31,32],db=[2,72],Wb=[1,323],Xb=[1,324], -Yb=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,128,131,133,134,135,139,140,151,153,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],mb=[32,151,153],Zb=[1,6,32,42,65,70,73,89,94,115,120,122,131,134,140,156],nb=[1,350],xb=[1,356],yb=[1,6,32,42,131,156],eb=[2,86],ob=[1,367],pb=[1,368],$b=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,151,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],zb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,140,156],ac= -[1,381],bc=[1,382],Ab=[6,31,32,94],cc=[6,31,32,70],Bb=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],dc=[31,70],qb=[1,408],rb=[1,409],Cb=[1,415],Db=[1,416],ec={trace:function(){},yy:{},symbols_:{error:2,Root:3,Body:4,Line:5,TERMINATOR:6,Expression:7,Statement:8,YieldReturn:9,Return:10,Comment:11,STATEMENT:12,Import:13,Export:14,Value:15,Invocation:16,Code:17,Operation:18,Assign:19,If:20,Try:21,While:22,For:23,Switch:24, -Class:25,Throw:26,Yield:27,YIELD:28,FROM:29,Block:30,INDENT:31,OUTDENT:32,Identifier:33,IDENTIFIER:34,Property:35,PROPERTY:36,AlphaNumeric:37,NUMBER:38,String:39,STRING:40,STRING_START:41,STRING_END:42,Regex:43,REGEX:44,REGEX_START:45,REGEX_END:46,Literal:47,JS:48,UNDEFINED:49,NULL:50,BOOL:51,INFINITY:52,NAN:53,Assignable:54,"\x3d":55,AssignObj:56,ObjAssignable:57,":":58,SimpleObjAssignable:59,ThisProperty:60,RETURN:61,HERECOMMENT:62,PARAM_START:63,ParamList:64,PARAM_END:65,FuncGlyph:66,"-\x3e":67, -"\x3d\x3e":68,OptComma:69,",":70,Param:71,ParamVar:72,"...":73,Array:74,Object:75,Splat:76,SimpleAssignable:77,Accessor:78,Parenthetical:79,Range:80,This:81,".":82,"?.":83,"::":84,"?::":85,Index:86,INDEX_START:87,IndexValue:88,INDEX_END:89,INDEX_SOAK:90,Slice:91,"{":92,AssignList:93,"}":94,CLASS:95,EXTENDS:96,IMPORT:97,ImportDefaultSpecifier:98,ImportNamespaceSpecifier:99,ImportSpecifierList:100,ImportSpecifier:101,AS:102,DEFAULT:103,IMPORT_ALL:104,EXPORT:105,ExportSpecifierList:106,EXPORT_ALL:107, -ExportSpecifier:108,OptFuncExist:109,Arguments:110,Super:111,SUPER:112,FUNC_EXIST:113,CALL_START:114,CALL_END:115,ArgList:116,THIS:117,"@":118,"[":119,"]":120,RangeDots:121,"..":122,Arg:123,SimpleArgs:124,TRY:125,Catch:126,FINALLY:127,CATCH:128,THROW:129,"(":130,")":131,WhileSource:132,WHILE:133,WHEN:134,UNTIL:135,Loop:136,LOOP:137,ForBody:138,FOR:139,BY:140,ForStart:141,ForSource:142,ForVariables:143,OWN:144,ForValue:145,FORIN:146,FOROF:147,FORFROM:148,SWITCH:149,Whens:150,ELSE:151,When:152,LEADING_WHEN:153, 
-IfBlock:154,IF:155,POST_IF:156,UNARY:157,UNARY_MATH:158,"-":159,"+":160,"--":161,"++":162,"?":163,MATH:164,"**":165,SHIFT:166,COMPARE:167,"\x26":168,"^":169,"|":170,"\x26\x26":171,"||":172,"BIN?":173,RELATION:174,COMPOUND_ASSIGN:175,$accept:0,$end:1},terminals_:{2:"error",6:"TERMINATOR",12:"STATEMENT",28:"YIELD",29:"FROM",31:"INDENT",32:"OUTDENT",34:"IDENTIFIER",36:"PROPERTY",38:"NUMBER",40:"STRING",41:"STRING_START",42:"STRING_END",44:"REGEX",45:"REGEX_START",46:"REGEX_END",48:"JS",49:"UNDEFINED", -50:"NULL",51:"BOOL",52:"INFINITY",53:"NAN",55:"\x3d",58:":",61:"RETURN",62:"HERECOMMENT",63:"PARAM_START",65:"PARAM_END",67:"-\x3e",68:"\x3d\x3e",70:",",73:"...",82:".",83:"?.",84:"::",85:"?::",87:"INDEX_START",89:"INDEX_END",90:"INDEX_SOAK",92:"{",94:"}",95:"CLASS",96:"EXTENDS",97:"IMPORT",102:"AS",103:"DEFAULT",104:"IMPORT_ALL",105:"EXPORT",107:"EXPORT_ALL",112:"SUPER",113:"FUNC_EXIST",114:"CALL_START",115:"CALL_END",117:"THIS",118:"@",119:"[",120:"]",122:"..",125:"TRY",127:"FINALLY",128:"CATCH", -129:"THROW",130:"(",131:")",133:"WHILE",134:"WHEN",135:"UNTIL",137:"LOOP",139:"FOR",140:"BY",144:"OWN",146:"FORIN",147:"FOROF",148:"FORFROM",149:"SWITCH",151:"ELSE",153:"LEADING_WHEN",155:"IF",156:"POST_IF",157:"UNARY",158:"UNARY_MATH",159:"-",160:"+",161:"--",162:"++",163:"?",164:"MATH",165:"**",166:"SHIFT",167:"COMPARE",168:"\x26",169:"^",170:"|",171:"\x26\x26",172:"||",173:"BIN?",174:"RELATION",175:"COMPOUND_ASSIGN"},productions_:[0,[3,0],[3,1],[4,1],[4,3],[4,2],[5,1],[5,1],[5,1],[8,1],[8,1],[8, -1],[8,1],[8,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[27,1],[27,2],[27,3],[30,2],[30,3],[33,1],[35,1],[37,1],[37,1],[39,1],[39,3],[43,1],[43,3],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[19,3],[19,4],[19,5],[56,1],[56,3],[56,5],[56,3],[56,5],[56,1],[59,1],[59,1],[59,1],[57,1],[57,1],[10,2],[10,1],[9,3],[9,2],[11,1],[17,5],[17,2],[66,1],[66,1],[69,0],[69,1],[64,0],[64,1],[64,3],[64,4],[64,6],[71,1],[71,2],[71,3],[71,1],[72,1],[72,1],[72,1],[72, -1],[76,2],[77,1],[77,2],[77,2],[77,1],[54,1],[54,1],[54,1],[15,1],[15,1],[15,1],[15,1],[15,1],[78,2],[78,2],[78,2],[78,2],[78,1],[78,1],[86,3],[86,2],[88,1],[88,1],[75,4],[93,0],[93,1],[93,3],[93,4],[93,6],[25,1],[25,2],[25,3],[25,4],[25,2],[25,3],[25,4],[25,5],[13,2],[13,4],[13,4],[13,5],[13,7],[13,6],[13,9],[100,1],[100,3],[100,4],[100,4],[100,6],[101,1],[101,3],[101,1],[101,3],[98,1],[99,3],[14,3],[14,5],[14,2],[14,4],[14,5],[14,6],[14,3],[14,4],[14,7],[106,1],[106,3],[106,4],[106,4],[106,6],[108, -1],[108,3],[108,3],[108,1],[108,3],[16,3],[16,3],[16,3],[16,1],[111,1],[111,2],[109,0],[109,1],[110,2],[110,4],[81,1],[81,1],[60,2],[74,2],[74,4],[121,1],[121,1],[80,5],[91,3],[91,2],[91,2],[91,1],[116,1],[116,3],[116,4],[116,4],[116,6],[123,1],[123,1],[123,1],[124,1],[124,3],[21,2],[21,3],[21,4],[21,5],[126,3],[126,3],[126,2],[26,2],[79,3],[79,5],[132,2],[132,4],[132,2],[132,4],[22,2],[22,2],[22,2],[22,1],[136,2],[136,2],[23,2],[23,2],[23,2],[138,2],[138,4],[138,2],[141,2],[141,3],[145,1],[145,1], -[145,1],[145,1],[143,1],[143,3],[142,2],[142,2],[142,4],[142,4],[142,4],[142,6],[142,6],[142,2],[142,4],[24,5],[24,7],[24,4],[24,6],[150,1],[150,2],[152,3],[152,4],[154,3],[154,5],[20,1],[20,3],[20,3],[20,3],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,5],[18,4],[18,3]],performAction:function(a,p,t,d,wa,b,e){a=b.length-1;switch(wa){case 1:return this.$=d.addLocationDataFn(e[a],e[a])(new d.Block); 
-case 2:return this.$=b[a];case 3:this.$=d.addLocationDataFn(e[a],e[a])(d.Block.wrap([b[a]]));break;case 4:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].push(b[a]));break;case 5:this.$=b[a-1];break;case 6:case 7:case 8:case 9:case 10:case 12:case 13:case 14:case 15:case 16:case 17:case 18:case 19:case 20:case 21:case 22:case 23:case 24:case 25:case 26:case 35:case 40:case 42:case 56:case 57:case 58:case 59:case 60:case 61:case 71:case 72:case 82:case 83:case 84:case 85:case 90:case 91:case 94:case 98:case 104:case 163:case 187:case 188:case 190:case 220:case 221:case 239:case 245:this.$= -b[a];break;case 11:this.$=d.addLocationDataFn(e[a],e[a])(new d.StatementLiteral(b[a]));break;case 27:this.$=d.addLocationDataFn(e[a],e[a])(new d.Op(b[a],new d.Value(new d.Literal(""))));break;case 28:case 249:case 250:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op(b[a-1],b[a]));break;case 29:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op(b[a-2].concat(b[a-1]),b[a]));break;case 30:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Block);break;case 31:case 105:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a- -1]);break;case 32:this.$=d.addLocationDataFn(e[a],e[a])(new d.IdentifierLiteral(b[a]));break;case 33:this.$=d.addLocationDataFn(e[a],e[a])(new d.PropertyName(b[a]));break;case 34:this.$=d.addLocationDataFn(e[a],e[a])(new d.NumberLiteral(b[a]));break;case 36:this.$=d.addLocationDataFn(e[a],e[a])(new d.StringLiteral(b[a]));break;case 37:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.StringWithInterpolations(b[a-1]));break;case 38:this.$=d.addLocationDataFn(e[a],e[a])(new d.RegexLiteral(b[a]));break; -case 39:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.RegexWithInterpolations(b[a-1].args));break;case 41:this.$=d.addLocationDataFn(e[a],e[a])(new d.PassthroughLiteral(b[a]));break;case 43:this.$=d.addLocationDataFn(e[a],e[a])(new d.UndefinedLiteral);break;case 44:this.$=d.addLocationDataFn(e[a],e[a])(new d.NullLiteral);break;case 45:this.$=d.addLocationDataFn(e[a],e[a])(new d.BooleanLiteral(b[a]));break;case 46:this.$=d.addLocationDataFn(e[a],e[a])(new d.InfinityLiteral(b[a]));break;case 47:this.$= -d.addLocationDataFn(e[a],e[a])(new d.NaNLiteral);break;case 48:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(b[a-2],b[a]));break;case 49:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Assign(b[a-3],b[a]));break;case 50:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(b[a-4],b[a-1]));break;case 51:case 87:case 92:case 93:case 95:case 96:case 97:case 222:case 223:this.$=d.addLocationDataFn(e[a],e[a])(new d.Value(b[a]));break;case 52:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(d.addLocationDataFn(e[a- -2])(new d.Value(b[a-2])),b[a],"object",{operatorToken:d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]))}));break;case 53:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(d.addLocationDataFn(e[a-4])(new d.Value(b[a-4])),b[a-1],"object",{operatorToken:d.addLocationDataFn(e[a-3])(new d.Literal(b[a-3]))}));break;case 54:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(d.addLocationDataFn(e[a-2])(new d.Value(b[a-2])),b[a],null,{operatorToken:d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]))})); -break;case 55:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(d.addLocationDataFn(e[a-4])(new d.Value(b[a-4])),b[a-1],null,{operatorToken:d.addLocationDataFn(e[a-3])(new d.Literal(b[a-3]))}));break;case 62:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Return(b[a]));break;case 63:this.$=d.addLocationDataFn(e[a],e[a])(new d.Return);break;case 64:this.$=d.addLocationDataFn(e[a-2],e[a])(new 
d.YieldReturn(b[a]));break;case 65:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.YieldReturn);break;case 66:this.$= -d.addLocationDataFn(e[a],e[a])(new d.Comment(b[a]));break;case 67:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Code(b[a-3],b[a],b[a-1]));break;case 68:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Code([],b[a],b[a-1]));break;case 69:this.$=d.addLocationDataFn(e[a],e[a])("func");break;case 70:this.$=d.addLocationDataFn(e[a],e[a])("boundfunc");break;case 73:case 110:this.$=d.addLocationDataFn(e[a],e[a])([]);break;case 74:case 111:case 130:case 150:case 182:case 224:this.$=d.addLocationDataFn(e[a], -e[a])([b[a]]);break;case 75:case 112:case 131:case 151:case 183:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].concat(b[a]));break;case 76:case 113:case 132:case 152:case 184:this.$=d.addLocationDataFn(e[a-3],e[a])(b[a-3].concat(b[a]));break;case 77:case 114:case 134:case 154:case 186:this.$=d.addLocationDataFn(e[a-5],e[a])(b[a-5].concat(b[a-2]));break;case 78:this.$=d.addLocationDataFn(e[a],e[a])(new d.Param(b[a]));break;case 79:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Param(b[a-1],null,!0)); -break;case 80:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Param(b[a-2],b[a]));break;case 81:case 189:this.$=d.addLocationDataFn(e[a],e[a])(new d.Expansion);break;case 86:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Splat(b[a-1]));break;case 88:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].add(b[a]));break;case 89:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Value(b[a-1],[].concat(b[a])));break;case 99:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Access(b[a]));break;case 100:this.$=d.addLocationDataFn(e[a- -1],e[a])(new d.Access(b[a],"soak"));break;case 101:this.$=d.addLocationDataFn(e[a-1],e[a])([d.addLocationDataFn(e[a-1])(new d.Access(new d.PropertyName("prototype"))),d.addLocationDataFn(e[a])(new d.Access(b[a]))]);break;case 102:this.$=d.addLocationDataFn(e[a-1],e[a])([d.addLocationDataFn(e[a-1])(new d.Access(new d.PropertyName("prototype"),"soak")),d.addLocationDataFn(e[a])(new d.Access(b[a]))]);break;case 103:this.$=d.addLocationDataFn(e[a],e[a])(new d.Access(new d.PropertyName("prototype"))); -break;case 106:this.$=d.addLocationDataFn(e[a-1],e[a])(d.extend(b[a],{soak:!0}));break;case 107:this.$=d.addLocationDataFn(e[a],e[a])(new d.Index(b[a]));break;case 108:this.$=d.addLocationDataFn(e[a],e[a])(new d.Slice(b[a]));break;case 109:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Obj(b[a-2],b[a-3].generated));break;case 115:this.$=d.addLocationDataFn(e[a],e[a])(new d.Class);break;case 116:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Class(null,null,b[a]));break;case 117:this.$=d.addLocationDataFn(e[a- -2],e[a])(new d.Class(null,b[a]));break;case 118:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Class(null,b[a-1],b[a]));break;case 119:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Class(b[a]));break;case 120:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Class(b[a-1],null,b[a]));break;case 121:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Class(b[a-2],b[a]));break;case 122:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Class(b[a-3],b[a-1],b[a]));break;case 123:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.ImportDeclaration(null, -b[a]));break;case 124:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-2],null),b[a]));break;case 125:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ImportDeclaration(new d.ImportClause(null,b[a-2]),b[a]));break;case 126:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ImportDeclaration(new d.ImportClause(null,new 
d.ImportSpecifierList([])),b[a]));break;case 127:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.ImportDeclaration(new d.ImportClause(null,new d.ImportSpecifierList(b[a- -4])),b[a]));break;case 128:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-4],b[a-2]),b[a]));break;case 129:this.$=d.addLocationDataFn(e[a-8],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-7],new d.ImportSpecifierList(b[a-4])),b[a]));break;case 133:case 153:case 169:case 185:this.$=d.addLocationDataFn(e[a-3],e[a])(b[a-2]);break;case 135:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportSpecifier(b[a]));break;case 136:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportSpecifier(b[a- -2],b[a]));break;case 137:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportSpecifier(new d.Literal(b[a])));break;case 138:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportSpecifier(new d.Literal(b[a-2]),b[a]));break;case 139:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportDefaultSpecifier(b[a]));break;case 140:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportNamespaceSpecifier(new d.Literal(b[a-2]),b[a]));break;case 141:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList([]))); -break;case 142:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList(b[a-2])));break;case 143:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.ExportNamedDeclaration(b[a]));break;case 144:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-2],b[a],null,{moduleDeclaration:"export"})));break;case 145:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-3],b[a],null,{moduleDeclaration:"export"}))); -break;case 146:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-4],b[a-1],null,{moduleDeclaration:"export"})));break;case 147:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportDefaultDeclaration(b[a]));break;case 148:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ExportAllDeclaration(new d.Literal(b[a-2]),b[a]));break;case 149:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList(b[a-4]),b[a]));break;case 155:this.$=d.addLocationDataFn(e[a], -e[a])(new d.ExportSpecifier(b[a]));break;case 156:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(b[a-2],b[a]));break;case 157:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(b[a-2],new d.Literal(b[a])));break;case 158:this.$=d.addLocationDataFn(e[a],e[a])(new d.ExportSpecifier(new d.Literal(b[a])));break;case 159:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(new d.Literal(b[a-2]),b[a]));break;case 160:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.TaggedTemplateCall(b[a- -2],b[a],b[a-1]));break;case 161:case 162:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Call(b[a-2],b[a],b[a-1]));break;case 164:this.$=d.addLocationDataFn(e[a],e[a])(new d.SuperCall);break;case 165:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.SuperCall(b[a]));break;case 166:this.$=d.addLocationDataFn(e[a],e[a])(!1);break;case 167:this.$=d.addLocationDataFn(e[a],e[a])(!0);break;case 168:this.$=d.addLocationDataFn(e[a-1],e[a])([]);break;case 170:case 171:this.$=d.addLocationDataFn(e[a],e[a])(new d.Value(new d.ThisLiteral)); -break;case 172:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Value(d.addLocationDataFn(e[a-1])(new d.ThisLiteral),[d.addLocationDataFn(e[a])(new d.Access(b[a]))],"this"));break;case 173:this.$=d.addLocationDataFn(e[a-1],e[a])(new 
d.Arr([]));break;case 174:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Arr(b[a-2]));break;case 175:this.$=d.addLocationDataFn(e[a],e[a])("inclusive");break;case 176:this.$=d.addLocationDataFn(e[a],e[a])("exclusive");break;case 177:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Range(b[a- -3],b[a-1],b[a-2]));break;case 178:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Range(b[a-2],b[a],b[a-1]));break;case 179:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Range(b[a-1],null,b[a]));break;case 180:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Range(null,b[a],b[a-1]));break;case 181:this.$=d.addLocationDataFn(e[a],e[a])(new d.Range(null,null,b[a]));break;case 191:this.$=d.addLocationDataFn(e[a-2],e[a])([].concat(b[a-2],b[a]));break;case 192:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Try(b[a])); -break;case 193:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Try(b[a-1],b[a][0],b[a][1]));break;case 194:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Try(b[a-2],null,null,b[a]));break;case 195:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Try(b[a-3],b[a-2][0],b[a-2][1],b[a]));break;case 196:this.$=d.addLocationDataFn(e[a-2],e[a])([b[a-1],b[a]]);break;case 197:this.$=d.addLocationDataFn(e[a-2],e[a])([d.addLocationDataFn(e[a-1])(new d.Value(b[a-1])),b[a]]);break;case 198:this.$=d.addLocationDataFn(e[a- -1],e[a])([null,b[a]]);break;case 199:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Throw(b[a]));break;case 200:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Parens(b[a-1]));break;case 201:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Parens(b[a-2]));break;case 202:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.While(b[a]));break;case 203:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.While(b[a-2],{guard:b[a]}));break;case 204:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.While(b[a],{invert:!0}));break; -case 205:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.While(b[a-2],{invert:!0,guard:b[a]}));break;case 206:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].addBody(b[a]));break;case 207:case 208:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a].addBody(d.addLocationDataFn(e[a-1])(d.Block.wrap([b[a-1]]))));break;case 209:this.$=d.addLocationDataFn(e[a],e[a])(b[a]);break;case 210:this.$=d.addLocationDataFn(e[a-1],e[a])((new d.While(d.addLocationDataFn(e[a-1])(new d.BooleanLiteral("true")))).addBody(b[a])); -break;case 211:this.$=d.addLocationDataFn(e[a-1],e[a])((new d.While(d.addLocationDataFn(e[a-1])(new d.BooleanLiteral("true")))).addBody(d.addLocationDataFn(e[a])(d.Block.wrap([b[a]]))));break;case 212:case 213:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.For(b[a-1],b[a]));break;case 214:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.For(b[a],b[a-1]));break;case 215:this.$=d.addLocationDataFn(e[a-1],e[a])({source:d.addLocationDataFn(e[a])(new d.Value(b[a]))});break;case 216:this.$=d.addLocationDataFn(e[a- -3],e[a])({source:d.addLocationDataFn(e[a-2])(new d.Value(b[a-2])),step:b[a]});break;case 217:d=d.addLocationDataFn(e[a-1],e[a]);b[a].own=b[a-1].own;b[a].ownTag=b[a-1].ownTag;b[a].name=b[a-1][0];b[a].index=b[a-1][1];this.$=d(b[a]);break;case 218:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a]);break;case 219:wa=d.addLocationDataFn(e[a-2],e[a]);b[a].own=!0;b[a].ownTag=d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]));this.$=wa(b[a]);break;case 225:this.$=d.addLocationDataFn(e[a-2],e[a])([b[a-2],b[a]]); -break;case 226:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a]});break;case 227:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a],object:!0});break;case 
228:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a]});break;case 229:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a],object:!0});break;case 230:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],step:b[a]});break;case 231:this.$=d.addLocationDataFn(e[a-5],e[a])({source:b[a-4],guard:b[a-2],step:b[a]}); -break;case 232:this.$=d.addLocationDataFn(e[a-5],e[a])({source:b[a-4],step:b[a-2],guard:b[a]});break;case 233:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a],from:!0});break;case 234:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a],from:!0});break;case 235:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Switch(b[a-3],b[a-1]));break;case 236:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.Switch(b[a-5],b[a-3],b[a-1]));break;case 237:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Switch(null, -b[a-1]));break;case 238:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.Switch(null,b[a-3],b[a-1]));break;case 240:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].concat(b[a]));break;case 241:this.$=d.addLocationDataFn(e[a-2],e[a])([[b[a-1],b[a]]]);break;case 242:this.$=d.addLocationDataFn(e[a-3],e[a])([[b[a-2],b[a-1]]]);break;case 243:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.If(b[a-1],b[a],{type:b[a-2]}));break;case 244:this.$=d.addLocationDataFn(e[a-4],e[a])(b[a-4].addElse(d.addLocationDataFn(e[a- -2],e[a])(new d.If(b[a-1],b[a],{type:b[a-2]}))));break;case 246:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].addElse(b[a]));break;case 247:case 248:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.If(b[a],d.addLocationDataFn(e[a-2])(d.Block.wrap([b[a-2]])),{type:b[a-1],statement:!0}));break;case 251:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("-",b[a]));break;case 252:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("+",b[a]));break;case 253:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("--", -b[a]));break;case 254:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("++",b[a]));break;case 255:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("--",b[a-1],null,!0));break;case 256:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("++",b[a-1],null,!0));break;case 257:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Existence(b[a-1]));break;case 258:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op("+",b[a-2],b[a]));break;case 259:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op("-",b[a-2],b[a]));break; -case 260:case 261:case 262:case 263:case 264:case 265:case 266:case 267:case 268:case 269:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op(b[a-1],b[a-2],b[a]));break;case 270:e=d.addLocationDataFn(e[a-2],e[a]);b="!"===b[a-1].charAt(0)?(new d.Op(b[a-1].slice(1),b[a-2],b[a])).invert():new d.Op(b[a-1],b[a-2],b[a]);this.$=e(b);break;case 271:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(b[a-2],b[a],b[a-1]));break;case 272:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(b[a-4],b[a-1],b[a-3])); -break;case 273:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Assign(b[a-3],b[a],b[a-2]));break;case 274:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Extends(b[a-2],b[a]))}},table:[{1:[2,1],3:1,4:2,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, 
-97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{1:[3]},{1:[2,2],6:qa},a(sa,[2,3]),a(sa,[2,6],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(sa,[2,7],{141:77,132:105,138:106,133:D,135:A,139:E,156:va}),a(sa,[2,8]),a(N,[2,14],{109:107,78:108,86:114,40:xa,41:xa,114:xa,82:ta,83:Na, -84:Fa,85:Ga,87:Ca,90:Pa,113:Ia}),a(N,[2,15],{86:114,109:117,78:118,82:ta,83:Na,84:Fa,85:Ga,87:Ca,90:Pa,113:Ia,114:xa}),a(N,[2,16]),a(N,[2,17]),a(N,[2,18]),a(N,[2,19]),a(N,[2,20]),a(N,[2,21]),a(N,[2,22]),a(N,[2,23]),a(N,[2,24]),a(N,[2,25]),a(N,[2,26]),a(Ea,[2,9]),a(Ea,[2,10]),a(Ea,[2,11]),a(Ea,[2,12]),a(Ea,[2,13]),a([1,6,32,42,131,133,135,139,156,163,164,165,166,167,168,169,170,171,172,173,174],Va,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26, -47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120,8:122,12:b,28:ea,29:Ya,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:[1,119],62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ba,Ha,{55:[1,124]}),a(Ba,[2,95]),a(Ba,[2,96]),a(Ba,[2,97]),a(Ba,[2,98]),a(t,[2,163]),a([6,31,65,70],p,{64:125,71:126,72:127,33:129,60:130, -74:131,75:132,34:g,73:d,92:m,118:wa,119:e}),{30:135,31:Da},{7:137,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C, -158:T,159:v,160:Y,161:S,162:M},{7:138,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}, -{7:139,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:140,8:122,10:20,11:21,12:b, -13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{15:142,16:143,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57, 
-44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71,74:53,75:54,77:141,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},{15:142,16:143,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71,74:53,75:54,77:145,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},a(Ta,ua,{96:[1,149],161:[1,146],162:[1,147],175:[1,148]}),a(N,[2,245],{151:[1,150]}),{30:151,31:Da},{30:152,31:Da},a(N,[2,209]),{30:153,31:Da},{7:154,8:122,10:20,11:21, -12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,155],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Eb,[2,115],{47:27,79:28,80:29,81:30,111:31, -74:53,75:54,37:55,43:57,33:70,60:71,39:80,15:142,16:143,54:144,30:156,77:158,31:Da,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,92:m,96:[1,157],112:L,117:V,118:X,119:G,130:W}),{7:159,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P, -111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ea,Za,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:160,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w, -92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a([1,6,31,32,42,70,94,131,133,135,139,156],[2,66]),{33:165,34:g,39:161,40:r,41:n,92:[1,164],98:162,99:163,104:Fb},{25:168,33:169,34:g,92:[1,167],95:k,103:[1,170],107:[1,171]},a(Ta,[2,92]),a(Ta,[2,93]),a(Ba,[2,40]),a(Ba,[2,41]),a(Ba,[2,42]),a(Ba,[2,43]),a(Ba,[2,44]),a(Ba,[2,45]),a(Ba,[2,46]),a(Ba,[2,47]),{4:172,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,31:[1,173],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:174,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, -23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:176,117:V,118:X,119:G,120:Gb,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ba,[2,170]),a(Ba,[2,171],{35:181,36:Oa}),a([1,6,31,32,42,46,65,70,73,82, 
-83,84,85,87,89,90,94,113,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],[2,164],{110:183,114:sb}),{31:[2,69]},{31:[2,70]},a(La,[2,87]),a(La,[2,90]),{7:185,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K, -105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:186,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X, -119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:187,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43, -133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:189,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,30:188,31:Da,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44, -137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{33:194,34:g,60:195,74:196,75:197,80:190,92:m,118:wa,119:G,143:191,144:[1,192],145:193},{142:198,146:[1,199],147:[1,200],148:[1,201]},a([6,31,70,94],Hb,{39:80,93:202,56:203,57:204,59:205,11:206,37:207,33:208,35:209,60:210,34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),a(Ib,[2,34]),a(Ib,[2,35]),a(Ba,[2,38]),{15:142,16:211,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71, -74:53,75:54,77:212,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},a([1,6,29,31,32,40,41,42,55,58,65,70,73,82,83,84,85,87,89,90,94,96,102,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],[2,32]),a(Jb,[2,36]),{4:213,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F, -50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(sa,[2,5],{7:4,8:5,9:6,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57, 
-33:70,60:71,141:77,39:80,5:214,12:b,28:u,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:D,135:A,137:q,139:E,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(N,[2,257]),{7:215,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71, -61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:216,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w, -74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:217,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29, -81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:218,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31, -112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:219,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa, -129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:220,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A, -136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:221,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77, 
-149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:222,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T, -159:v,160:Y,161:S,162:M},{7:223,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:224, -8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:225,8:122,10:20,11:21,12:b,13:23, -14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:226,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:227,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, -25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:228,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70, -34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,208]),a(N,[2,213]),{7:229,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g, 
-37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,207]),a(N,[2,212]),{39:230,40:r,41:n,110:231,114:sb},a(La,[2,88]),a(Kb,[2,167]),{35:232,36:Oa},{35:233,36:Oa},a(La,[2,103],{35:234,36:Oa}),{35:235,36:Oa},a(La, -[2,104]),{7:237,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Lb,74:53,75:54,77:40,79:28,80:29,81:30,88:236,91:238,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,121:239,122:tb,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y, -161:S,162:M},{86:242,87:Ca,90:Pa},{110:243,114:sb},a(La,[2,89]),a(sa,[2,65],{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:244,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:Za,135:Za,139:Za,156:Za, -137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ma,[2,28],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:245,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P, -111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{132:105,133:D,135:A,138:106,139:E,141:77,156:va},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,163,164,165,166,167,168,169,170,171,172,173,174],Va,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44, -138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120,8:122,12:b,28:ea,29:Ya,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),{6:[1,247],7:246,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,248],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I, -49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([6,31],Ja,{69:251,65:[1,249],70:Mb}),a(Sa,[2,74]),a(Sa,[2,78],{55:[1,253],73:[1,252]}),a(Sa,[2,81]),a(fb,[2,82]),a(fb,[2,83]),a(fb,[2,84]),a(fb,[2,85]),{35:181,36:Oa},{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7, 
-16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:176,117:V,118:X,119:G,120:Gb,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,68]),{4:256,5:3,7:4,8:5,9:6, -10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,32:[1,255],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([1,6,31,32,42,65,70,73,89,94, -115,120,122,131,133,134,135,139,140,156,159,160,164,165,166,167,168,169,170,171,172,173,174],[2,249],{141:77,132:102,138:103,163:fa}),a(ab,[2,250],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,251],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,252],{141:77,132:102,138:103,163:fa,165:ga}),a(N,[2,253],{40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua}),a(Kb,xa,{109:107,78:108,86:114,82:ta,83:Na,84:Fa,85:Ga,87:Ca,90:Pa,113:Ia}),{78:118,82:ta,83:Na,84:Fa,85:Ga,86:114,87:Ca,90:Pa,109:117, -113:Ia,114:xa},a(Nb,Ha),a(N,[2,254],{40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua}),a(N,[2,255]),a(N,[2,256]),{6:[1,259],7:257,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,258],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U, -130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:260,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44, -137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{30:261,31:Da,155:[1,262]},a(N,[2,192],{126:263,127:[1,264],128:[1,265]}),a(N,[2,206]),a(N,[2,214]),{31:[1,266],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{150:267,152:268,153:gb},a(N,[2,116]),{7:270,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea, -33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Eb,[2,119],{30:271,31:Da,40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua,96:[1,272]}),a(Ma,[2,199],{141:77,132:102,138:103,159:ma,160:Z, 
-163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,bb,{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,123]),{29:[1,273],70:[1,274]},{29:[1,275]},{31:hb,33:280,34:g,94:[1,276],100:277,101:278,103:Wa},a([29,70],[2,139]),{102:[1,282]},{31:ub,33:287,34:g,94:[1,283],103:cb,106:284,108:285},a(Ea,[2,143]),{55:[1,289]},{7:290,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{29:[1,291]},{6:qa,131:[1,292]},{4:293,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, -18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([6,31,70,120],Ob,{141:77,132:102,138:103,121:294,73:[1,295],122:tb,133:D,135:A,139:E, -156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(vb,[2,173]),a([6,31,120],Ja,{69:296,70:ib}),a(Qa,[2,182]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31, -112:L,116:298,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,[2,188]),a(Qa,[2,189]),a(Pb,[2,172]),a(Pb,[2,33]),a(t,[2,165]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua, -74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,115:[1,299],116:300,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{30:301,31:Da,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Qb,[2,202],{141:77,132:102,138:103,133:D,134:[1,302],135:A,139:E,159:ma,160:Z,163:fa,164:ia, -165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Qb,[2,204],{141:77,132:102,138:103,133:D,134:[1,303],135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,210]),a(Xa,[2,211],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,156,159,160,163,164,165,166,167,168, 
-169,170,171,172,173,174],[2,215],{140:[1,304]}),a(jb,[2,218]),{33:194,34:g,60:195,74:196,75:197,92:m,118:wa,119:e,143:305,145:193},a(jb,[2,224],{70:[1,306]}),a(kb,[2,220]),a(kb,[2,221]),a(kb,[2,222]),a(kb,[2,223]),a(N,[2,217]),{7:307,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40, -79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:308,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, -97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:309,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V, -118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(lb,Ja,{69:310,70:Rb}),a(Aa,[2,111]),a(Aa,[2,51],{58:[1,312]}),a(Sb,[2,60],{55:[1,313]}),a(Aa,[2,56]),a(Sb,[2,61]),a(wb,[2,57]),a(wb,[2,58]),a(wb,[2,59]),{46:[1,314],78:118,82:ta,83:Na,84:Fa,85:Ga,86:114,87:Ca,90:Pa,109:117,113:Ia,114:xa},a(Nb,ua),{6:qa,42:[1,315]},a(sa,[2,4]),a(Tb,[2,258],{141:77,132:102,138:103,163:fa,164:ia,165:ga}),a(Tb,[2,259],{141:77, -132:102,138:103,163:fa,164:ia,165:ga}),a(ab,[2,260],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,261],{141:77,132:102,138:103,163:fa,165:ga}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,166,167,168,169,170,171,172,173,174],[2,262],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173],[2,263],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,174:da}), -a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,168,169,170,171,172,173],[2,264],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,169,170,171,172,173],[2,265],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,170,171,172,173],[2,266],{141:77,132:102,138:103,159:ma,160:Z, -163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,171,172,173],[2,267],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,172,173],[2,268],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134, 
-135,139,140,156,173],[2,269],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173,174],[2,270],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja}),a(Xa,[2,248],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,247],{141:77,132:102, -138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(t,[2,160]),a(t,[2,161]),a(La,[2,99]),a(La,[2,100]),a(La,[2,101]),a(La,[2,102]),{89:[1,316]},{73:Lb,89:[2,107],121:317,122:tb,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{89:[2,108]},{7:318,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15, -24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,181],92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ub,[2,175]),a(Ub,Vb),a(La,[2,106]),a(t,[2,162]),a(sa,[2,64],{141:77,132:102,138:103,133:bb,135:bb,139:bb,156:bb, -159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,29],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,48],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:319,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g, -37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:320,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57, -44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{66:321,67:c,68:w},a(Ra,db,{72:127,33:129,60:130,74:131,75:132,71:322,34:g,73:d,92:m,118:wa,119:e}),{6:Wb,31:Xb},a(Sa,[2,79]),{7:325,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,Ob,{141:77,132:102,138:103,73:[1,326],133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga, 
-166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Yb,[2,30]),{6:qa,32:[1,327]},a(Ma,[2,271],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:328,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40, -79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:329,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, -97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ma,[2,274],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,246]),{7:330,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27, -48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,193],{127:[1,331]}),{30:332,31:Da},{30:335,31:Da,33:333,34:g,75:334,92:m},{150:336,152:268,153:gb},{32:[1,337],151:[1,338],152:339,153:gb},a(mb,[2,239]),{7:341,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8, -17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,124:340,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Zb,[2,117],{141:77,132:102,138:103,30:342,31:Da,133:D,135:A,139:E,159:ma, -160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,120]),{7:343,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45, -139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{39:344,40:r,41:n},{92:[1,346],99:345,104:Fb},{39:347,40:r,41:n},{29:[1,348]},a(lb,Ja,{69:349,70:nb}),a(Aa,[2,130]),{31:hb,33:280,34:g,100:351,101:278,103:Wa},a(Aa,[2,135],{102:[1,352]}),a(Aa,[2,137],{102:[1,353]}),{33:354,34:g},a(Ea,[2,141]),a(lb,Ja,{69:355,70:xb}),a(Aa,[2,150]),{31:ub,33:287,34:g,103:cb,106:357,108:285},a(Aa,[2,155],{102:[1,358]}),a(Aa,[2,158],{102:[1,359]}),{6:[1,361],7:360,8:122,10:20,11:21,12:b,13:23,14:24, 
-15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,362],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(yb,[2,147],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma, -160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{39:363,40:r,41:n},a(Ba,[2,200]),{6:qa,32:[1,364]},{7:365,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W, -132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([12,28,34,38,40,41,44,45,48,49,50,51,52,53,61,62,63,67,68,92,95,97,105,112,117,118,119,125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Vb,{6:eb,31:eb,70:eb,120:eb}),{6:ob,31:pb,120:[1,366]},a([6,31,32,115,120],db,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43, -136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,76:179,7:254,123:369,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,73:Ua,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:D,135:A,137:q,139:E,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ra,Ja,{69:370,70:ib}),a(t,[2,168]),a([6,31,115],Ja,{69:371,70:ib}),a($b,[2,243]),{7:372,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, -23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:373,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19, -28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:374,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h, -39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(jb,[2,219]),{33:194,34:g,60:195,74:196,75:197,92:m,118:wa,119:e,145:375},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,156],[2,226],{141:77,132:102,138:103,134:[1, 
-376],140:[1,377],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,227],{141:77,132:102,138:103,134:[1,378],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,233],{141:77,132:102,138:103,134:[1,379],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{6:ac,31:bc,94:[1,380]},a(Ab,db,{39:80,57:204,59:205,11:206,37:207,33:208,35:209,60:210,56:383, -34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),{7:384,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,385],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v, -160:Y,161:S,162:M},{7:386,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,387],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}, -a(Ba,[2,39]),a(Jb,[2,37]),a(La,[2,105]),{7:388,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,179],92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v, -160:Y,161:S,162:M},{89:[2,180],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ma,[2,49],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{32:[1,389],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{30:390,31:Da},a(Sa,[2,75]),{33:129, -34:g,60:130,71:391,72:127,73:d,74:131,75:132,92:m,118:wa,119:e},a(cc,p,{71:126,72:127,33:129,60:130,74:131,75:132,64:392,34:g,73:d,92:m,118:wa,119:e}),a(Sa,[2,80],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Qa,eb),a(Yb,[2,31]),{32:[1,393],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ma,[2,273], -{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{30:394,31:Da,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{30:395,31:Da},a(N,[2,194]),{30:396,31:Da},{30:397,31:Da},a(Bb,[2,198]),{32:[1,398],151:[1,399],152:339,153:gb},a(N,[2,237]),{30:400,31:Da},a(mb,[2,240]),{30:401,31:Da,70:[1,402]},a(dc,[2,190],{141:77,132:102,138:103,133:D, 
-135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,118]),a(Zb,[2,121],{141:77,132:102,138:103,30:403,31:Da,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,124]),{29:[1,404]},{31:hb,33:280,34:g,100:405,101:278,103:Wa},a(Ea,[2,125]),{39:406,40:r,41:n},{6:qb,31:rb,94:[1,407]},a(Ab,db,{33:280,101:410,34:g,103:Wa}),a(Ra,Ja,{69:411,70:nb}),{33:412,34:g}, -{33:413,34:g},{29:[2,140]},{6:Cb,31:Db,94:[1,414]},a(Ab,db,{33:287,108:417,34:g,103:cb}),a(Ra,Ja,{69:418,70:xb}),{33:419,34:g,103:[1,420]},{33:421,34:g},a(yb,[2,144],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:422,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q, -51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:423,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R, -62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ea,[2,148]),{131:[1,424]},{120:[1,425],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(vb,[2,174]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, -18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,123:426,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, -20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:427,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,[2,183]),{6:ob,31:pb,32:[1,428]},{6:ob,31:pb,115:[1,429]}, -a(Xa,[2,203],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,205],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,216],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(jb,[2,225]),{7:430,8:122,10:20,11:21, 
-12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:431,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, -18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:432,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, -23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:433,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19, -28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(vb,[2,109]),{11:206,33:208,34:g,35:209,36:Oa,37:207,38:h,39:80,40:r,41:n,56:434,57:204,59:205,60:210,62:z,118:wa},a(cc,Hb,{39:80,56:203,57:204, -59:205,11:206,37:207,33:208,35:209,60:210,93:435,34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),a(Aa,[2,112]),a(Aa,[2,52],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:436,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l, -66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Aa,[2,54],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:437,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18, -27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{89:[2,178],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na, 
-173:ra,174:da},a(N,[2,50]),a(N,[2,67]),a(Sa,[2,76]),a(Ra,Ja,{69:438,70:Mb}),a(N,[2,272]),a($b,[2,244]),a(N,[2,195]),a(Bb,[2,196]),a(Bb,[2,197]),a(N,[2,235]),{30:439,31:Da},{32:[1,440]},a(mb,[2,241],{6:[1,441]}),{7:442,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30, -92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,122]),{39:443,40:r,41:n},a(lb,Ja,{69:444,70:nb}),a(Ea,[2,126]),{29:[1,445]},{33:280,34:g,101:446,103:Wa},{31:hb,33:280,34:g,100:447,101:278,103:Wa},a(Aa,[2,131]),{6:qb,31:rb,32:[1,448]},a(Aa,[2,136]),a(Aa,[2,138]),a(Ea,[2,142],{29:[1,449]}),{33:287,34:g,103:cb,108:450},{31:ub,33:287,34:g,103:cb,106:451,108:285}, -a(Aa,[2,151]),{6:Cb,31:Db,32:[1,452]},a(Aa,[2,156]),a(Aa,[2,157]),a(Aa,[2,159]),a(yb,[2,145],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{32:[1,453],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ba,[2,201]),a(Ba,[2,177]),a(Qa,[2,184]),a(Ra,Ja,{69:454,70:ib}),a(Qa,[2,185]),a(t,[2,169]),a([1,6,31,32,42, -65,70,73,89,94,115,120,122,131,133,134,135,139,156],[2,228],{141:77,132:102,138:103,140:[1,455],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,230],{141:77,132:102,138:103,134:[1,456],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,229],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,234],{141:77,132:102, -138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Aa,[2,113]),a(Ra,Ja,{69:457,70:Rb}),{32:[1,458],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{32:[1,459],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{6:Wb,31:Xb,32:[1,460]},{32:[1,461]},a(N, -[2,238]),a(mb,[2,242]),a(dc,[2,191],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,128]),{6:qb,31:rb,94:[1,462]},{39:463,40:r,41:n},a(Aa,[2,132]),a(Ra,Ja,{69:464,70:nb}),a(Aa,[2,133]),{39:465,40:r,41:n},a(Aa,[2,152]),a(Ra,Ja,{69:466,70:xb}),a(Aa,[2,153]),a(Ea,[2,146]),{6:ob,31:pb,32:[1,467]},{7:468,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, -25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:469,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70, 
-34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{6:ac,31:bc,32:[1,470]},a(Aa,[2,53]),a(Aa,[2,55]),a(Sa,[2,77]),a(N,[2,236]),{29:[1,471]},a(Ea,[2,127]),{6:qb,31:rb,32:[1,472]},a(Ea,[2,149]),{6:Cb,31:Db,32:[1, -473]},a(Qa,[2,186]),a(Ma,[2,231],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,232],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Aa,[2,114]),{39:474,40:r,41:n},a(Aa,[2,134]),a(Aa,[2,154]),a(Ea,[2,129])],defaultActions:{68:[2,69],69:[2,70],238:[2,108],354:[2,140]},parseError:function(a,d){if(d.recoverable)this.trace(a);else{var e=function(a, -d){this.message=a;this.hash=d};e.prototype=Error;throw new e(a,d);}},parse:function(a){var d=[0],e=[null],b=[],p=this.table,t="",wa=0,c=0,g=0,Da=b.slice.call(arguments,1),k=Object.create(this.lexer),h={};for(f in this.yy)Object.prototype.hasOwnProperty.call(this.yy,f)&&(h[f]=this.yy[f]);k.setInput(a,h);h.lexer=k;h.parser=this;"undefined"==typeof k.yylloc&&(k.yylloc={});var f=k.yylloc;b.push(f);var l=k.options&&k.options.ranges;this.parseError="function"===typeof h.parseError?h.parseError:Object.getPrototypeOf(this).parseError; -for(var m,Ta,Ha,n,ua={},y,w;;){Ha=d[d.length-1];if(this.defaultActions[Ha])n=this.defaultActions[Ha];else{if(null===m||"undefined"==typeof m)m=k.lex()||1,"number"!==typeof m&&(m=this.symbols_[m]||m);n=p[Ha]&&p[Ha][m]}if("undefined"===typeof n||!n.length||!n[0]){w=[];for(y in p[Ha])this.terminals_[y]&&2=ta?this.wrapInBraces(d):d};b.prototype.compileRoot=function(a){var d,b;a.indent=a.bare?"":Ca;a.level=N;this.spaced=!0;a.scope=new xa(null,this,null,null!=(b=a.referencedVars)?b:[]);var e=a.locals||[];b=0;for(d=e.length;b=Fa?this.wrapInBraces(d): -d};return b}(w);f.StringLiteral=D=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.RegexLiteral=X=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.PassthroughLiteral=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.IdentifierLiteral=x=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isAssignable=ha; -return b}(z);f.PropertyName=L=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isAssignable=ha;return b}(z);f.StatementLiteral=W=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isStatement=ha;b.prototype.makeReturn=na;b.prototype.jumps=function(a){if("break"===this.value&&!(null!=a&&a.loop||null!=a&&a.block)||"continue"===this.value&&(null==a||!a.loop))return this};b.prototype.compileNode=function(a){return[this.makeCode(""+ -this.tab+this.value+";")]};return b}(z);f.ThisLiteral=E=function(a){function b(){b.__super__.constructor.call(this,"this")}v(b,a);b.prototype.compileNode=function(a){var d;a=null!=(d=a.scope.method)&&d.bound?a.scope.method.context:this.value;return[this.makeCode(a)]};return b}(z);f.UndefinedLiteral=ca=function(a){function 
b(){b.__super__.constructor.call(this,"undefined")}v(b,a);b.prototype.compileNode=function(a){return[this.makeCode(a.level>=Ga?"(void 0)":"void 0")]};return b}(z);f.NullLiteral= -c=function(a){function b(){b.__super__.constructor.call(this,"null")}v(b,a);return b}(z);f.BooleanLiteral=b=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.Return=G=function(a){function b(a){this.expression=a}v(b,a);b.prototype.children=["expression"];b.prototype.isStatement=ha;b.prototype.makeReturn=na;b.prototype.jumps=na;b.prototype.compileToFragments=function(a,d){var p;var e=null!=(p=this.expression)?p.makeReturn():void 0;return!e||e instanceof -b?b.__super__.compileToFragments.call(this,a,d):e.compileToFragments(a,d)};b.prototype.compileNode=function(a){var b=[];b.push(this.makeCode(this.tab+("return"+(this.expression?" ":""))));this.expression&&(b=b.concat(this.expression.compileToFragments(a,Ka)));b.push(this.makeCode(";"));return b};return b}(sa);f.YieldReturn=T=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.compileNode=function(a){null==a.scope.parent&&this.error("yield can only occur inside functions"); -return b.__super__.compileNode.apply(this,arguments)};return b}(G);f.Value=C=function(a){function t(a,b,wa){if(!b&&a instanceof t)return a;this.base=a;this.properties=b||[];wa&&(this[wa]=!0);return this}v(t,a);t.prototype.children=["base","properties"];t.prototype.add=function(a){this.properties=this.properties.concat(a);return this};t.prototype.hasProperties=function(){return!!this.properties.length};t.prototype.bareLiteral=function(a){return!this.properties.length&&this.base instanceof a};t.prototype.isArray= -function(){return this.bareLiteral(q)};t.prototype.isRange=function(){return this.bareLiteral(V)};t.prototype.isComplex=function(){return this.hasProperties()||this.base.isComplex()};t.prototype.isAssignable=function(){return this.hasProperties()||this.base.isAssignable()};t.prototype.isNumber=function(){return this.bareLiteral(w)};t.prototype.isString=function(){return this.bareLiteral(D)};t.prototype.isRegex=function(){return this.bareLiteral(X)};t.prototype.isUndefined=function(){return this.bareLiteral(ca)}; -t.prototype.isNull=function(){return this.bareLiteral(c)};t.prototype.isBoolean=function(){return this.bareLiteral(b)};t.prototype.isAtomic=function(){var a;var b=this.properties.concat(this.base);var wa=0;for(a=b.length;wathis.properties.length&&!this.base.isComplex()&&(null==p||!p.isComplex()))return[this,this];b=new t(this.base,this.properties.slice(0,-1));if(b.isComplex()){var e=new x(a.scope.freeVariable("base"));b=new t(new P(new y(e, -b)))}if(!p)return[b,e];if(p.isComplex()){var c=new x(a.scope.freeVariable("name"));p=new R(new y(c,p.index));c=new R(c)}return[b.add(p),new t(e||b.base,[c||p])]};t.prototype.compileNode=function(a){var b;this.base.front=this.front;var p=this.properties;var e=this.base.compileToFragments(a,p.length?Ga:null);p.length&&Pa.test(da(e))&&e.push(this.makeCode("."));var t=0;for(b=p.length;t=Math.abs(this.fromNum-this.toNum)){var c=function(){e=[];for(var a=p=this.fromNum,b=this.toNum;p<=b?a<=b:a>=b;p<=b?a++:a--)e.push(a);return e}.apply(this);this.exclusive&&c.pop();return[this.makeCode("["+c.join(", ")+"]")]}var t=this.tab+Ca;var f=a.scope.freeVariable("i",{single:!0});var g=a.scope.freeVariable("results");var k="\n"+t+g+" \x3d [];";if(b)a.index=f,b=da(this.compileNode(a));else{var h= -f+" \x3d "+this.fromC+(this.toC!==this.toVar?", 
"+this.toC:"");b=this.fromVar+" \x3c\x3d "+this.toVar;b="var "+h+"; "+b+" ? "+f+" \x3c"+this.equals+" "+this.toVar+" : "+f+" \x3e"+this.equals+" "+this.toVar+"; "+b+" ? "+f+"++ : "+f+"--"}f="{ "+g+".push("+f+"); }\n"+t+"return "+g+";\n"+a.indent;a=function(a){return null!=a?a.contains(Va):void 0};if(a(this.from)||a(this.to))c=", arguments";return[this.makeCode("(function() {"+k+"\n"+t+"for ("+b+")"+f+"}).apply(this"+(null!=c?c:"")+")")]};return b}(sa); -f.Slice=aa=function(a){function b(a){this.range=a;b.__super__.constructor.call(this)}v(b,a);b.prototype.children=["range"];b.prototype.compileNode=function(a){var b=this.range;var p=b.to;var e=(b=b.from)&&b.compileToFragments(a,Ka)||[this.makeCode("0")];if(p){b=p.compileToFragments(a,Ka);var c=da(b);if(this.range.exclusive||-1!==+c)var t=", "+(this.range.exclusive?c:p.isNumber()?""+(+c+1):(b=p.compileToFragments(a,Ga),"+"+da(b)+" + 1 || 9e9"))}return[this.makeCode(".slice("+da(e)+(t||"")+")")]};return b}(sa); -f.Obj=m=function(a){function b(a,b){this.generated=null!=b?b:!1;this.objects=this.properties=a||[]}v(b,a);b.prototype.children=["properties"];b.prototype.compileNode=function(a){var b,p,e;var c=this.properties;if(this.generated){var t=0;for(b=c.length;t= -Fa?this.wrapInBraces(t):t}var h=g[0];1===e&&h instanceof H&&h.error("Destructuring assignment has no target");var m=this.variable.isObject();if(p&&1===e&&!(h instanceof U)){var l=null;if(h instanceof b&&"object"===h.context){t=h;var n=t.variable;var q=n.base;h=t.value;h instanceof b&&(l=h.value,h=h.variable)}else h instanceof b&&(l=h.value,h=h.variable),q=m?h["this"]?h.properties[0].name:new L(h.unwrap().value):new w(0);var r=q.unwrap()instanceof L;f=new C(f);f.properties.push(new (r?qa:R)(q));(c= -za(h.unwrap().value))&&h.error(c);l&&(f=new k("?",f,l));return(new b(h,f,null,{param:this.param})).compileToFragments(a,N)}var v=f.compileToFragments(a,ta);var y=da(v);t=[];n=!1;f.unwrap()instanceof x&&!this.variable.assigns(y)||(t.push([this.makeCode((l=a.scope.freeVariable("ref"))+" \x3d ")].concat(M.call(v))),v=[this.makeCode(l)],y=l);l=f=0;for(d=g.length;fN?this.wrapInBraces(e):e};return b}(sa);f.Code=h=function(b){function c(b,d,c){this.params=b||[];this.body=d||new a;this.bound="boundfunc"===c;this.isGenerator=!!this.body.contains(function(a){return a instanceof k&&a.isYield()|| -a instanceof T})}v(c,b);c.prototype.children=["params","body"];c.prototype.isStatement=function(){return!!this.ctor};c.prototype.jumps=ka;c.prototype.makeScope=function(a){return new xa(a,this.body,this)};c.prototype.compileNode=function(b){var d,f,e,g;this.bound&&null!=(d=b.scope.method)&&d.bound&&(this.context=b.scope.method.context);if(this.bound&&!this.context)return this.context="_this",d=new c([new K(new x(this.context))],new a([this])),d=new ya(d,[new E]),d.updateLocationDataIfMissing(this.locationData), -d.compileNode(b);b.scope=la(b,"classScope")||this.makeScope(b.scope);b.scope.shared=la(b,"sharedScope");b.indent+=Ca;delete b.bare;delete b.isExistentialEquals;d=[];var p=[];var h=this.params;var t=0;for(e=h.length;t=Ga?this.wrapInBraces(p):p};c.prototype.eachParamName=function(a){var b;var c=this.params;var e=[];var f=0;for(b=c.length;f=d.length)return[];if(1===d.length)return e=d[0],d=e.compileToFragments(a,ta),c?d:[].concat(e.makeCode(Ia("slice",a)+".call("),d,e.makeCode(")"));c=d.slice(f);var h=g=0;for(p=c.length;g< -p;h=++g){e=c[h];var k=e.compileToFragments(a,ta);c[h]=e instanceof 
b?[].concat(e.makeCode(Ia("slice",a)+".call("),k,e.makeCode(")")):[].concat(e.makeCode("["),k,e.makeCode("]"))}if(0===f)return e=d[0],a=e.joinFragmentArrays(c.slice(1),", "),c[0].concat(e.makeCode(".concat("),a,e.makeCode(")"));g=d.slice(0,f);p=[];k=0;for(h=g.length;k=Ga)return(new P(this)).compileToFragments(a);var f="+"===c||"-"===c;("new"===c||"typeof"===c||"delete"===c||f&&this.first instanceof b&&this.first.operator===c)&&d.push([this.makeCode(" ")]);if(f&&this.first instanceof b||"new"===c&&this.first.isStatement(a))this.first=new P(this.first);d.push(this.first.compileToFragments(a,Fa));this.flip&&d.reverse();return this.joinFragmentArrays(d,"")};b.prototype.compileYield=function(a){var b; -var d=[];var c=this.operator;null==a.scope.parent&&this.error("yield can only occur inside functions");0<=S.call(Object.keys(this.first),"expression")&&!(this.first instanceof ba)?null!=this.first.expression&&d.push(this.first.expression.compileToFragments(a,Fa)):(a.level>=Ka&&d.push([this.makeCode("(")]),d.push([this.makeCode(c)]),""!==(null!=(b=this.first.base)?b.value:void 0)&&d.push([this.makeCode(" ")]),d.push(this.first.compileToFragments(a,Fa)),a.level>=Ka&&d.push([this.makeCode(")")]));return this.joinFragmentArrays(d, -"")};b.prototype.compilePower=function(a){var b=new C(new x("Math"),[new qa(new L("pow"))]);return(new ya(b,[this.first,this.second])).compileToFragments(a)};b.prototype.compileFloorDivision=function(a){var d=new C(new x("Math"),[new qa(new L("floor"))]);var c=this.second.isComplex()?new P(this.second):this.second;c=new b("/",this.first,c);return(new ya(d,[c])).compileToFragments(a)};b.prototype.compileModulo=function(a){var b=new C(new z(Ia("modulo",a)));return(new ya(b,[this.first,this.second])).compileToFragments(a)}; -b.prototype.toString=function(a){return b.__super__.toString.call(this,a,this.constructor.name+" "+this.operator)};return b}(sa);f.In=O=function(a){function b(a,b){this.object=a;this.array=b}v(b,a);b.prototype.children=["object","array"];b.prototype.invert=ra;b.prototype.compileNode=function(a){var b;if(this.array instanceof C&&this.array.isArray()&&this.array.base.objects.length){var c=this.array.base.objects;var e=0;for(b=c.length;e=c.length)?c:this.wrapInBraces(c)};return b}(sa); -f.StringWithInterpolations=A=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.compileNode=function(a){var d;if(!a.inTaggedTemplateCall)return b.__super__.compileNode.apply(this,arguments);var c=this.body.unwrap();var e=[];c.traverseChildren(!1,function(a){if(a instanceof D)e.push(a);else if(a instanceof P)return e.push(a),!1;return!0});c=[];c.push(this.makeCode("`"));var f=0;for(d=e.length;fh,this.step&&null!=h&&e||(d=n.freeVariable("len")),K=""+t+f+" \x3d 0, "+d+" \x3d "+A+".length",w=""+t+f+" \x3d "+A+".length - 1",d=f+" \x3c "+d,n=f+" \x3e\x3d 0",this.step?(null!=h?e&&(d= -n,K=w):(d=r+" \x3e 0 ? "+d+" : "+n,K="("+r+" \x3e 0 ? 
("+K+") : "+w+")"),f=f+" +\x3d "+r):f=""+(q!==f?"++"+f:f+"++"),K=[this.makeCode(K+"; "+d+"; "+t+f)])}if(this.returns){var B=""+this.tab+c+" \x3d [];\n";var V="\n"+this.tab+"return "+c+";";l.makeReturn(c)}this.guard&&(1=Na?this.wrapInBraces(e):e};c.prototype.unfoldSoak=function(){return this.soak&&this};return c}(sa);var gc={extend:function(a){return"function(child, parent) { for (var key in parent) { if ("+Ia("hasProp",a)+".call(parent, key)) child[key] \x3d parent[key]; } function ctor() { this.constructor \x3d child; } ctor.prototype \x3d parent.prototype; child.prototype \x3d new ctor(); child.__super__ \x3d parent.prototype; return child; }"},bind:function(){return"function(fn, me){ return function(){ return fn.apply(me, arguments); }; }"}, -indexOf:function(){return"[].indexOf || function(item) { for (var i \x3d 0, l \x3d this.length; i \x3c l; i++) { if (i in this \x26\x26 this[i] \x3d\x3d\x3d item) return i; } return -1; }"},modulo:function(){return"function(a, b) { return (+a % (b \x3d +b) + b) % b; }"},hasProp:function(){return"{}.hasOwnProperty"},slice:function(){return"[].slice"}};var N=1;var Ka=2;var ta=3;var Na=4;var Fa=5;var Ga=6;var Ca=" ";var Pa=/^[+-]?\d+$/;var Ia=function(a,b){var c=b.scope.root;if(a in c.utilities)return c.utilities[a]; -var d=c.freeVariable(a);c.assign(d,gc[a](b));return c.utilities[a]=d};var Ea=function(a,b){a=a.replace(/\n/g,"$\x26"+b);return a.replace(/\s+$/,"")};var Va=function(a){return a instanceof x&&"arguments"===a.value};var ea=function(a){return a instanceof E||a instanceof h&&a.bound||a instanceof va};var Ya=function(a){return a.isComplex()||("function"===typeof a.isAssignable?a.isAssignable():void 0)};var Ba=function(a,b,c){if(a=b[c].unfoldSoak(a))return b[c]=a.body,a.body=new C(b),a}}).call(this);return f}(); -u["./sourcemap"]=function(){var f={};(function(){var u=function(){function f(f){this.line=f;this.columns=[]}f.prototype.add=function(f,a,b){var q=a[0];a=a[1];null==b&&(b={});if(!this.columns[f]||!b.noReplace)return this.columns[f]={line:this.line,column:f,sourceLine:q,sourceColumn:a}};f.prototype.sourceLocation=function(f){for(var a;!((a=this.columns[f])||0>=f);)f--;return a&&[a.sourceLine,a.sourceColumn]};return f}();f=function(){function f(){this.lines=[]}f.prototype.add=function(f,a,b){var q;null== -b&&(b={});var g=a[0];a=a[1];return((q=this.lines)[g]||(q[g]=new u(g))).add(a,f,b)};f.prototype.sourceLocation=function(f){var a;var b=f[0];for(f=f[1];!((a=this.lines[b])||0>=b);)b--;return a&&a.sourceLocation(f)};f.prototype.generate=function(f,a){var b,q,g,h,r,n,u;null==f&&(f={});null==a&&(a=null);var y=g=q=u=0;var I=!1;var F="";var Q=this.lines;var x=b=0;for(h=Q.length;bf?1:0);a||!b;)f=a&31,(a>>=5)&&(f|=32),b+=this.encodeBase64(f);return b};f.prototype.encodeBase64=function(f){var a;if(!(a= -"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"[f]))throw Error("Cannot Base64 encode value: "+f);return a};return f}()}).call(this);return f}();u["./coffee-script"]=function(){var f={};(function(){var qa,q,y={}.hasOwnProperty;var a=u("fs");var b=u("vm");var ya=u("path");var g=u("./lexer").Lexer;var h=u("./parser").parser;var r=u("./helpers");var n=u("./sourcemap");var B=u("../../package.json");f.VERSION=B.version;f.FILE_EXTENSIONS=[".coffee",".litcoffee",".coffee.md"];f.helpers= -r;var H=function(a){switch(!1){case "function"!==typeof Buffer:return(new Buffer(a)).toString("base64");case "function"!==typeof btoa:return btoa(encodeURIComponent(a).replace(/%([0-9A-F]{2})/g,function(a,b){return 
String.fromCharCode("0x"+b)}));default:throw Error("Unable to base64 encode inline sourcemap.");}};B=function(a){return function(b,f){null==f&&(f={});try{return a.call(this,b,f)}catch(m){if("string"!==typeof b)throw m;throw r.updateSyntaxError(m,b,f.filename);}}};var I={};var F={};f.compile= -qa=B(function(a,b){var c,f,g,l;var q=r.extend;b=q({},b);var u=b.sourceMap||b.inlineMap||null==b.filename;q=b.filename||"\x3canonymous\x3e";I[q]=a;u&&(g=new n);var x=O.tokenize(a,b);var y=b;var G=[];var z=0;for(c=x.length;z>>=1,a+=a;return f};g.compact=function(a){var f,c,g,q;q=[];f=0;for(g=a.length;fh)return p.call(this,g,k-1);(t=g[0],0<=E.call(c,t))?h+=1:(m=g[0],0<=E.call(a,m))&&--h;k+=1}return k-1};m.prototype.removeLeadingNewlines=function(){var a,c,f,h,y;h=this.tokens;a=c=0;for(f=h.length;cm;f=0<=m?++h:--h){for(;"HERECOMMENT"===this.tag(c+f+a);)a+=2;if(null!=g[f]&&("string"===typeof g[f]&&(g[f]=[g[f]]),A=this.tag(c+f+a),0>E.call(g[f],A)))return-1}return c+f+a-1};m.prototype.looksObjectish=function(f){var k;if(-1E.call(g,t))&&((A=this.tag(f),0>E.call(c,A))||this.tokens[f].generated)&&(H=this.tag(f),0>E.call(G,H)));)(h=this.tag(f),0<=E.call(a,h))&&k.push(this.tag(f)),(m=this.tag(f),0<=E.call(c, +m))&&k.length&&k.pop(),--f;return l=this.tag(f),0<=E.call(g,l)};m.prototype.addImplicitBracesAndParens=function(){var k,g;k=[];g=null;return this.scanTokens(function(m,h,t){var p,A,y,q,r,I,w,x,z,B,u,C,M,F,J,N,O,K;K=m[0];B=(u=0E.call(a,c):return g[1]; +case "@"!==this.tag(h-2):return h-2;default:return h-1}}.call(this);"HERECOMMENT"===this.tag(A-2);)A-=2;this.insideForDeclaration="FOR"===z;I=0===A||(F=this.tag(A-1),0<=E.call(G,F))||t[A-1].newLine;if(J()&&(w=J(),F=w[0],u=w[1],("{"===F||"INDENT"===F&&"{"===this.tag(u-1))&&(I||","===this.tag(A-1)||"{"===this.tag(A-1))))return y(1);x(A,!!I);return y(2)}w()&&0<=E.call(G,K)&&(J()[2].sameLine=!1);x="OUTDENT"===B||u.newLine;if(0<=E.call(f,K)||0<=E.call(xa,K)&&x)for(;q();)if(x=J(),F=x[0],u=x[1],F=x[2],x= +F.sameLine,I=F.startsLine,r()&&","!==B)p();else if(w()&&!this.insideForDeclaration&&x&&"TERMINATOR"!==K&&":"!==B)A();else if(!w()||"TERMINATOR"!==K||","===B||I&&this.looksObjectish(h+1))break;else{if("HERECOMMENT"===z)return y(1);A()}if(!(","!==K||this.looksObjectish(h+1)||!w()||this.insideForDeclaration||"TERMINATOR"===z&&this.looksObjectish(h+2)))for(z="OUTDENT"===z?1:0;w();)A(h+z);return y(1)})};m.prototype.addLocationDataToGeneratedTokens=function(){return this.scanTokens(function(a,c,f){var h, +k,m;if(a[2]||!a.generated&&!a.explicit)return 1;"{"===a[0]&&(h=null!=(m=f[c+1])?m[2]:void 0)?(k=h.first_line,h=h.first_column):(h=null!=(k=f[c-1])?k[2]:void 0)?(k=h.last_line,h=h.last_column):k=h=0;a[2]={first_line:k,first_column:h,last_line:k,last_column:h};return 1})};m.prototype.fixOutdentLocationData=function(){return this.scanTokens(function(a,c,f){if(!("OUTDENT"===a[0]||a.generated&&"CALL_END"===a[0]||a.generated&&"}"===a[0]))return 1;c=f[c-1][2];a[2]={first_line:c.last_line,first_column:c.last_column, +last_line:c.last_line,last_column:c.last_column};return 1})};m.prototype.normalizeLines=function(){var a,c,f,h,m;m=f=h=null;c=function(a,c){var f,h,k,g;return";"!==a[1]&&(f=a[0],0<=E.call(F,f))&&!("TERMINATOR"===a[0]&&(h=this.tag(c+1),0<=E.call(q,h)))&&!("ELSE"===a[0]&&"THEN"!==m)&&!!("CATCH"!==(k=a[0])&&"FINALLY"!==k||"-\x3e"!==m&&"\x3d\x3e"!==m)||(g=a[0],0<=E.call(xa,g))&&this.tokens[c-1].newLine};a=function(a,c){return this.tokens.splice(","===this.tag(c-1)?c-1:c,0,h)};return this.scanTokens(function(k, +g,t){var 
p,y,l;k=k[0];if("TERMINATOR"===k){if("ELSE"===this.tag(g+1)&&"OUTDENT"!==this.tag(g-1))return t.splice.apply(t,[g,1].concat(O.call(this.indentation()))),1;if(p=this.tag(g+1),0<=E.call(q,p))return t.splice(g,1),0}if("CATCH"===k)for(p=y=1;2>=y;p=++y)if("OUTDENT"===(l=this.tag(g+p))||"TERMINATOR"===l||"FINALLY"===l)return t.splice.apply(t,[g+p,0].concat(O.call(this.indentation()))),2+p;0<=E.call(x,k)&&"INDENT"!==this.tag(g+1)&&("ELSE"!==k||"IF"!==this.tag(g+1))&&(m=k,l=this.indentation(t[g]), +f=l[0],h=l[1],"THEN"===m&&(f.fromThen=!0),t.splice(g+1,0,f),this.detectEnd(g+2,c,a),"THEN"===k&&t.splice(g,1));return 1})};m.prototype.tagPostfixConditionals=function(){var a,c,f;f=null;c=function(a,c){a=a[0];c=this.tokens[c-1][0];return"TERMINATOR"===a||"INDENT"===a&&0>E.call(x,c)};a=function(a,c){if("INDENT"!==a[0]||a.generated&&!a.fromThen)return f[0]="POST_"+f[0]};return this.scanTokens(function(h,k){if("IF"!==h[0])return 1;f=h;this.detectEnd(k+1,c,a);return 1})};m.prototype.indentation=function(a){var c, +f;c=["INDENT",2];f=["OUTDENT",2];a?(c.generated=f.generated=!0,c.origin=f.origin=a):c.explicit=f.explicit=!0;return[c,f]};m.prototype.generate=v;m.prototype.tag=function(a){var c;return null!=(c=this.tokens[a])?c[0]:void 0};return m}();u=[["(",")"],["[","]"],["{","}"],["INDENT","OUTDENT"],["CALL_START","CALL_END"],["PARAM_START","PARAM_END"],["INDEX_START","INDEX_END"],["STRING_START","STRING_END"],["REGEX_START","REGEX_END"]];g.INVERSES=w={};c=[];a=[];M=0;for(J=u.length;Mthis.indent){if(a)return this.indebt=f-this.indent,this.suppressNewlines(),c.length; +if(!this.tokens.length)return this.baseIndent=this.indent=f,c.length;a=f-this.indent+this.outdebt;this.token("INDENT",a,c.length-f,f);this.indents.push(a);this.ends.push({tag:"OUTDENT"});this.outdebt=this.indebt=0;this.indent=f}else fk&&(m=this.token("+", +"+"),m[2]={first_line:l[2].first_line,first_column:l[2].first_column,last_line:l[2].first_line,last_column:l[2].first_column});(y=this.tokens).push.apply(y,A)}if(r)return a=a[a.length-1],r.origin=["STRING",null,{first_line:r[2].first_line,first_column:r[2].first_column,last_line:a[2].last_line,last_column:a[2].last_column}],r=this.token("STRING_END",")"),r[2]={first_line:a[2].last_line,first_column:a[2].last_column,last_line:a[2].last_line,last_column:a[2].last_column}};g.prototype.pair=function(a){var c; +c=this.ends;c=c[c.length-1];return a!==(c=null!=c?c.tag:void 0)?("OUTDENT"!==c&&this.error("unmatched "+a),c=this.indents,c=c[c.length-1],this.outdentToken(c,!0),this.pair(a)):this.ends.pop()};g.prototype.getLineAndColumnFromChunk=function(a){var c,f;if(0===a)return[this.chunkLine,this.chunkColumn];f=a>=this.chunk.length?this.chunk:this.chunk.slice(0,+(a-1)+1||9E9);a=ea(f,"\n");c=this.chunkColumn;0da.call(ma.call(t).concat(ma.call(Ca)),a):return"keyword '"+c+"' can't be assigned";case 0>da.call(Y,a):return"'"+c+"' can't be assigned";case 0>da.call(W,a):return"reserved word '"+c+"' can't be assigned";default:return!1}};g.isUnassignable=la;ha=function(a){var c;return"IDENTIFIER"===a[0]?("from"===a[1]&&(a[1][0]="IDENTIFIER",!0),!0):"FOR"===a[0]?!1:"{"===(c=a[1])||"["===c||","===c||":"===c?!1:!0};t="true false null this new delete typeof in instanceof return throw break continue debugger yield if else switch for while do try catch finally class extends super import export default".split(" "); +Ca="undefined Infinity NaN then unless until loop of by when".split(" ");c={and:"\x26\x26",or:"||",is:"\x3d\x3d",isnt:"!\x3d",not:"!",yes:"true",no:"false",on:"true",off:"false"};a=function(){var 
a;a=[];for(oa in c)a.push(oa);return a}();Ca=Ca.concat(a);W="case function var void with const let enum native implements interface package private protected public static".split(" ");Y=["arguments","eval"];g.JS_FORBIDDEN=t.concat(W).concat(Y);ua=65279;J=/^(?!\d)((?:(?!\s)[$\w\x7f-\uffff])+)([^\n\S]*:(?!:))?/; +U=/^0b[01]+|^0o[0-7]+|^0x[\da-f]+|^\d*\.?\d+(?:e[+-]?\d+)?/i;T=/^(?:[-=]>|[-+*\/%<>&|^!?=]=|>>>=?|([-+:])\1|([&|<>*\/%])\2=?|\?(\.|::)|\.{2,3})/;ca=/^[^\n\S]+/;f=/^###([^#][\s\S]*?)(?:###[^\n\S]*|###$)|^(?:\s*#(?!##[^#]).*)+/;q=/^[-=]>/;A=/^(?:\n[^\n\S]*)+/;k=/^`(?!``)((?:[^`\\]|\\[\s\S])*)`/;N=/^```((?:[^`\\]|\\[\s\S]|`(?!``))*)```/;ba=/^(?:'''|"""|'|")/;K=/^(?:[^\\']|\\[\s\S])*/;V=/^(?:[^\\"#]|\\[\s\S]|\#(?!\{))*/;x=/^(?:[^\\']|\\[\s\S]|'(?!''))*/;G=/^(?:[^\\"#]|\\[\s\S]|"(?!"")|\#(?!\{))*/;Z=/((?:\\\\)+)|\\[^\S\n]*\n\s*/g; +X=/\s*\n\s*/g;F=/\n+([^\n\S]*)(?=\S)/g;aa=/^\/(?!\/)((?:[^[\/\n\\]|\\[^\n]|\[(?:\\[^\n]|[^\]\n\\])*\])*)(\/)?/;S=/^\w*/;ka=/^(?!.*(.).*\1)[imgy]*$/;v=/^(?:[^\\\/#]|\\[\s\S]|\/(?!\/\/)|\#(?!\{))*/;M=/((?:\\\\)+)|\\(\s)|\s+(?:#.*)?/g;z=/^(\/|\/{3}\s*)(\*)/;I=/^\/=?\s/;w=/\*\//;y=/^\s*(?:,|\??\.(?![.\d])|::)/;O=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7]|[1-7])|(x(?![\da-fA-F]{2}).{0,2})|(u(?![\da-fA-F]{4}).{0,4}))/;p=/^[^\n\S]*\n/;R=/\n[^\n\S]*$/;qa=/\s+$/;l="-\x3d +\x3d /\x3d *\x3d %\x3d ||\x3d \x26\x26\x3d ?\x3d \x3c\x3c\x3d \x3e\x3e\x3d \x3e\x3e\x3e\x3d \x26\x3d ^\x3d |\x3d **\x3d //\x3d %%\x3d".split(" "); +ya=["NEW","TYPEOF","DELETE","DO"];Xa=["!","~"];Q=["\x3c\x3c","\x3e\x3e","\x3e\x3e\x3e"];D="\x3d\x3d !\x3d \x3c \x3e \x3c\x3d \x3e\x3d".split(" ");P=["*","/","%","//","%%"];B=["IN","OF","INSTANCEOF"];xa="IDENTIFIER PROPERTY ) ] ? @ THIS SUPER".split(" ");E=xa.concat("NUMBER INFINITY NAN STRING STRING_END REGEX REGEX_END BOOL NULL UNDEFINED } ::".split(" "));H=E.concat(["++","--"]);h=["INDENT","OUTDENT","TERMINATOR"];r=[")","}","]"]}).call(this);return g}();u["./parser"]=function(){var g={},ua={exports:g}, +xa=function(){function g(){this.yy={}}var a=function(a,n,pa,b){pa=pa||{};for(b=a.length;b--;pa[a[b]]=n);return pa},c=[1,22],u=[1,25],f=[1,83],D=[1,79],l=[1,84],w=[1,85],G=[1,81],F=[1,82],x=[1,56],v=[1,58],M=[1,59],N=[1,60],J=[1,61],r=[1,62],E=[1,49],O=[1,50],m=[1,32],k=[1,68],t=[1,69],p=[1,78],h=[1,47],y=[1,51],P=[1,52],A=[1,67],H=[1,65],U=[1,66],T=[1,64],I=[1,42],aa=[1,48],S=[1,63],z=[1,73],B=[1,74],W=[1,75],C=[1,76],Q=[1,46],X=[1,72],Y=[1,34],V=[1,35],Z=[1,36],K=[1,37],ba=[1,38],R=[1,39],xa=[1, +86],ua=[1,6,32,42,131],qa=[1,101],ka=[1,89],ca=[1,88],ea=[1,87],ga=[1,90],ha=[1,91],la=[1,92],oa=[1,93],L=[1,94],ja=[1,95],ra=[1,96],da=[1,97],ma=[1,98],sa=[1,99],ia=[1,100],ya=[1,104],ta=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Ja=[2,165],Ta=[1,110],Ga=[1,111],Ua=[1,112],Fa=[1,113],Pa=[1,115],Qa=[1,116],Na=[1,109],za=[1,6,32,42,131,133,135,139,156],na=[2,27],fa=[1,123],Ha=[1,121],Aa=[1,6,31,32,40,41,42,65,70,73, +82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Ia=[2,94],b=[1,6,31,32,42,46,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],n=[2,73],pa=[1,128],e=[1,133],d=[1,134],va=[1,136],Ka=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172, 
+173,174],wa=[2,91],Gb=[1,6,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ab=[2,63],Hb=[1,166],bb=[1,178],Wa=[1,180],Ib=[1,175],Oa=[1,182],ub=[1,184],La=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],Jb=[2,110],Kb=[1,6,31,32,40,41,42,58,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134, +135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Lb=[1,6,31,32,40,41,42,46,58,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Mb=[40,41,114],Nb=[1,241],vb=[1,240],Ma=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156],Ea=[2,71],Ob=[1,250],Va=[6,31,32,65,70],hb=[6,31,32,55,65,70,73],cb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,164,166, +167,168,169,170,171,172,173,174],Pb=[40,41,82,83,84,85,87,90,113,114],ib=[1,269],db=[2,62],jb=[1,279],Ya=[1,281],wb=[1,286],eb=[1,288],Qb=[2,186],xb=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],kb=[1,297],Ra=[6,31,32,70,115,120],Rb=[1,6,31,32,40,41,42,55,58,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163, +164,165,166,167,168,169,170,171,172,173,174,175],Sb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,140,156],Za=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,134,140,156],lb=[146,147,148],mb=[70,146,147,148],nb=[6,31,94],Tb=[1,311],Ba=[6,31,32,70,94],Ub=[6,31,32,58,70,94],yb=[6,31,32,55,58,70,94],Vb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,166,167,168,169,170,171,172,173,174],Wb=[12,28,34,38,40,41,44,45,48,49,50,51,52,53,61,62,63,67,68,89,92,95,97,105,112,117,118,119, +125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Xb=[2,175],Sa=[6,31,32],fb=[2,72],Yb=[1,323],Zb=[1,324],$b=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,128,131,133,134,135,139,140,151,153,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ob=[32,151,153],ac=[1,6,32,42,65,70,73,89,94,115,120,122,131,134,140,156],pb=[1,350],zb=[1,356],Ab=[1,6,32,42,131,156],gb=[2,86],qb=[1,366],rb=[1,367],bc=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,151,156,159,160,163, +164,165,166,167,168,169,170,171,172,173,174],Bb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,140,156],cc=[1,380],dc=[1,381],Cb=[6,31,32,94],ec=[6,31,32,70],Db=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],fc=[31,70],sb=[1,407],tb=[1,408],Eb=[1,414],Fb=[1,415],gc={trace:function(){},yy:{},symbols_:{error:2,Root:3,Body:4,Line:5,TERMINATOR:6,Expression:7,Statement:8,YieldReturn:9,Return:10,Comment:11,STATEMENT:12, +Import:13,Export:14,Value:15,Invocation:16,Code:17,Operation:18,Assign:19,If:20,Try:21,While:22,For:23,Switch:24,Class:25,Throw:26,Yield:27,YIELD:28,FROM:29,Block:30,INDENT:31,OUTDENT:32,Identifier:33,IDENTIFIER:34,Property:35,PROPERTY:36,AlphaNumeric:37,NUMBER:38,String:39,STRING:40,STRING_START:41,STRING_END:42,Regex:43,REGEX:44,REGEX_START:45,REGEX_END:46,Literal:47,JS:48,UNDEFINED:49,NULL:50,BOOL:51,INFINITY:52,NAN:53,Assignable:54,"\x3d":55,AssignObj:56,ObjAssignable:57,":":58,SimpleObjAssignable:59, 
+ThisProperty:60,RETURN:61,HERECOMMENT:62,PARAM_START:63,ParamList:64,PARAM_END:65,FuncGlyph:66,"-\x3e":67,"\x3d\x3e":68,OptComma:69,",":70,Param:71,ParamVar:72,"...":73,Array:74,Object:75,Splat:76,SimpleAssignable:77,Accessor:78,Parenthetical:79,Range:80,This:81,".":82,"?.":83,"::":84,"?::":85,Index:86,INDEX_START:87,IndexValue:88,INDEX_END:89,INDEX_SOAK:90,Slice:91,"{":92,AssignList:93,"}":94,CLASS:95,EXTENDS:96,IMPORT:97,ImportDefaultSpecifier:98,ImportNamespaceSpecifier:99,ImportSpecifierList:100, +ImportSpecifier:101,AS:102,DEFAULT:103,IMPORT_ALL:104,EXPORT:105,ExportSpecifierList:106,EXPORT_ALL:107,ExportSpecifier:108,OptFuncExist:109,Arguments:110,Super:111,SUPER:112,FUNC_EXIST:113,CALL_START:114,CALL_END:115,ArgList:116,THIS:117,"@":118,"[":119,"]":120,RangeDots:121,"..":122,Arg:123,SimpleArgs:124,TRY:125,Catch:126,FINALLY:127,CATCH:128,THROW:129,"(":130,")":131,WhileSource:132,WHILE:133,WHEN:134,UNTIL:135,Loop:136,LOOP:137,ForBody:138,FOR:139,BY:140,ForStart:141,ForSource:142,ForVariables:143, +OWN:144,ForValue:145,FORIN:146,FOROF:147,FORFROM:148,SWITCH:149,Whens:150,ELSE:151,When:152,LEADING_WHEN:153,IfBlock:154,IF:155,POST_IF:156,UNARY:157,UNARY_MATH:158,"-":159,"+":160,"--":161,"++":162,"?":163,MATH:164,"**":165,SHIFT:166,COMPARE:167,"\x26":168,"^":169,"|":170,"\x26\x26":171,"||":172,"BIN?":173,RELATION:174,COMPOUND_ASSIGN:175,$accept:0,$end:1},terminals_:{2:"error",6:"TERMINATOR",12:"STATEMENT",28:"YIELD",29:"FROM",31:"INDENT",32:"OUTDENT",34:"IDENTIFIER",36:"PROPERTY",38:"NUMBER",40:"STRING", +41:"STRING_START",42:"STRING_END",44:"REGEX",45:"REGEX_START",46:"REGEX_END",48:"JS",49:"UNDEFINED",50:"NULL",51:"BOOL",52:"INFINITY",53:"NAN",55:"\x3d",58:":",61:"RETURN",62:"HERECOMMENT",63:"PARAM_START",65:"PARAM_END",67:"-\x3e",68:"\x3d\x3e",70:",",73:"...",82:".",83:"?.",84:"::",85:"?::",87:"INDEX_START",89:"INDEX_END",90:"INDEX_SOAK",92:"{",94:"}",95:"CLASS",96:"EXTENDS",97:"IMPORT",102:"AS",103:"DEFAULT",104:"IMPORT_ALL",105:"EXPORT",107:"EXPORT_ALL",112:"SUPER",113:"FUNC_EXIST",114:"CALL_START", +115:"CALL_END",117:"THIS",118:"@",119:"[",120:"]",122:"..",125:"TRY",127:"FINALLY",128:"CATCH",129:"THROW",130:"(",131:")",133:"WHILE",134:"WHEN",135:"UNTIL",137:"LOOP",139:"FOR",140:"BY",144:"OWN",146:"FORIN",147:"FOROF",148:"FORFROM",149:"SWITCH",151:"ELSE",153:"LEADING_WHEN",155:"IF",156:"POST_IF",157:"UNARY",158:"UNARY_MATH",159:"-",160:"+",161:"--",162:"++",163:"?",164:"MATH",165:"**",166:"SHIFT",167:"COMPARE",168:"\x26",169:"^",170:"|",171:"\x26\x26",172:"||",173:"BIN?",174:"RELATION",175:"COMPOUND_ASSIGN"}, +productions_:[0,[3,0],[3,1],[4,1],[4,3],[4,2],[5,1],[5,1],[5,1],[8,1],[8,1],[8,1],[8,1],[8,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[27,1],[27,2],[27,3],[30,2],[30,3],[33,1],[35,1],[37,1],[37,1],[39,1],[39,3],[43,1],[43,3],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[19,3],[19,4],[19,5],[56,1],[56,3],[56,5],[56,3],[56,5],[56,1],[59,1],[59,1],[59,1],[57,1],[57,1],[10,2],[10,1],[9,3],[9,2],[11,1],[17,5],[17,2],[66,1],[66,1],[69,0],[69,1],[64,0],[64, 
+1],[64,3],[64,4],[64,6],[71,1],[71,2],[71,3],[71,1],[72,1],[72,1],[72,1],[72,1],[76,2],[77,1],[77,2],[77,2],[77,1],[54,1],[54,1],[54,1],[15,1],[15,1],[15,1],[15,1],[15,1],[78,2],[78,2],[78,2],[78,2],[78,1],[78,1],[86,3],[86,2],[88,1],[88,1],[75,4],[93,0],[93,1],[93,3],[93,4],[93,6],[25,1],[25,2],[25,3],[25,4],[25,2],[25,3],[25,4],[25,5],[13,2],[13,4],[13,4],[13,5],[13,7],[13,6],[13,9],[100,1],[100,3],[100,4],[100,4],[100,6],[101,1],[101,3],[101,1],[101,3],[98,1],[99,3],[14,3],[14,5],[14,2],[14,4], +[14,5],[14,6],[14,3],[14,4],[14,7],[106,1],[106,3],[106,4],[106,4],[106,6],[108,1],[108,3],[108,3],[108,1],[16,3],[16,3],[16,3],[16,1],[111,1],[111,2],[109,0],[109,1],[110,2],[110,4],[81,1],[81,1],[60,2],[74,2],[74,4],[121,1],[121,1],[80,5],[91,3],[91,2],[91,2],[91,1],[116,1],[116,3],[116,4],[116,4],[116,6],[123,1],[123,1],[123,1],[124,1],[124,3],[21,2],[21,3],[21,4],[21,5],[126,3],[126,3],[126,2],[26,2],[79,3],[79,5],[132,2],[132,4],[132,2],[132,4],[22,2],[22,2],[22,2],[22,1],[136,2],[136,2],[23, +2],[23,2],[23,2],[138,2],[138,4],[138,2],[141,2],[141,3],[145,1],[145,1],[145,1],[145,1],[143,1],[143,3],[142,2],[142,2],[142,4],[142,4],[142,4],[142,6],[142,6],[142,2],[142,4],[24,5],[24,7],[24,4],[24,6],[150,1],[150,2],[152,3],[152,4],[154,3],[154,5],[20,1],[20,3],[20,3],[20,3],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,5],[18,4],[18,3]],performAction:function(a,n,pa,b,c,e,d){a= +e.length-1;switch(c){case 1:return this.$=b.addLocationDataFn(d[a],d[a])(new b.Block);case 2:return this.$=e[a];case 3:this.$=b.addLocationDataFn(d[a],d[a])(b.Block.wrap([e[a]]));break;case 4:this.$=b.addLocationDataFn(d[a-2],d[a])(e[a-2].push(e[a]));break;case 5:this.$=e[a-1];break;case 6:case 7:case 8:case 9:case 10:case 12:case 13:case 14:case 15:case 16:case 17:case 18:case 19:case 20:case 21:case 22:case 23:case 24:case 25:case 26:case 35:case 40:case 42:case 56:case 57:case 58:case 59:case 60:case 61:case 71:case 72:case 82:case 83:case 84:case 85:case 90:case 91:case 94:case 98:case 104:case 162:case 186:case 187:case 189:case 219:case 220:case 238:case 244:this.$= +e[a];break;case 11:this.$=b.addLocationDataFn(d[a],d[a])(new b.StatementLiteral(e[a]));break;case 27:this.$=b.addLocationDataFn(d[a],d[a])(new b.Op(e[a],new b.Value(new b.Literal(""))));break;case 28:case 248:case 249:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Op(e[a-1],e[a]));break;case 29:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Op(e[a-2].concat(e[a-1]),e[a]));break;case 30:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Block);break;case 31:case 105:this.$=b.addLocationDataFn(d[a-2],d[a])(e[a- +1]);break;case 32:this.$=b.addLocationDataFn(d[a],d[a])(new b.IdentifierLiteral(e[a]));break;case 33:this.$=b.addLocationDataFn(d[a],d[a])(new b.PropertyName(e[a]));break;case 34:this.$=b.addLocationDataFn(d[a],d[a])(new b.NumberLiteral(e[a]));break;case 36:this.$=b.addLocationDataFn(d[a],d[a])(new b.StringLiteral(e[a]));break;case 37:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.StringWithInterpolations(e[a-1]));break;case 38:this.$=b.addLocationDataFn(d[a],d[a])(new b.RegexLiteral(e[a]));break; +case 39:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.RegexWithInterpolations(e[a-1].args));break;case 41:this.$=b.addLocationDataFn(d[a],d[a])(new b.PassthroughLiteral(e[a]));break;case 43:this.$=b.addLocationDataFn(d[a],d[a])(new b.UndefinedLiteral);break;case 44:this.$=b.addLocationDataFn(d[a],d[a])(new b.NullLiteral);break;case 
45:this.$=b.addLocationDataFn(d[a],d[a])(new b.BooleanLiteral(e[a]));break;case 46:this.$=b.addLocationDataFn(d[a],d[a])(new b.InfinityLiteral(e[a]));break;case 47:this.$= +b.addLocationDataFn(d[a],d[a])(new b.NaNLiteral);break;case 48:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Assign(e[a-2],e[a]));break;case 49:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Assign(e[a-3],e[a]));break;case 50:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Assign(e[a-4],e[a-1]));break;case 51:case 87:case 92:case 93:case 95:case 96:case 97:case 221:case 222:this.$=b.addLocationDataFn(d[a],d[a])(new b.Value(e[a]));break;case 52:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Assign(b.addLocationDataFn(d[a- +2])(new b.Value(e[a-2])),e[a],"object",{operatorToken:b.addLocationDataFn(d[a-1])(new b.Literal(e[a-1]))}));break;case 53:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Assign(b.addLocationDataFn(d[a-4])(new b.Value(e[a-4])),e[a-1],"object",{operatorToken:b.addLocationDataFn(d[a-3])(new b.Literal(e[a-3]))}));break;case 54:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Assign(b.addLocationDataFn(d[a-2])(new b.Value(e[a-2])),e[a],null,{operatorToken:b.addLocationDataFn(d[a-1])(new b.Literal(e[a-1]))})); +break;case 55:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Assign(b.addLocationDataFn(d[a-4])(new b.Value(e[a-4])),e[a-1],null,{operatorToken:b.addLocationDataFn(d[a-3])(new b.Literal(e[a-3]))}));break;case 62:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Return(e[a]));break;case 63:this.$=b.addLocationDataFn(d[a],d[a])(new b.Return);break;case 64:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.YieldReturn(e[a]));break;case 65:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.YieldReturn);break;case 66:this.$= +b.addLocationDataFn(d[a],d[a])(new b.Comment(e[a]));break;case 67:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Code(e[a-3],e[a],e[a-1]));break;case 68:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Code([],e[a],e[a-1]));break;case 69:this.$=b.addLocationDataFn(d[a],d[a])("func");break;case 70:this.$=b.addLocationDataFn(d[a],d[a])("boundfunc");break;case 73:case 110:this.$=b.addLocationDataFn(d[a],d[a])([]);break;case 74:case 111:case 130:case 150:case 181:case 223:this.$=b.addLocationDataFn(d[a], +d[a])([e[a]]);break;case 75:case 112:case 131:case 151:case 182:this.$=b.addLocationDataFn(d[a-2],d[a])(e[a-2].concat(e[a]));break;case 76:case 113:case 132:case 152:case 183:this.$=b.addLocationDataFn(d[a-3],d[a])(e[a-3].concat(e[a]));break;case 77:case 114:case 134:case 154:case 185:this.$=b.addLocationDataFn(d[a-5],d[a])(e[a-5].concat(e[a-2]));break;case 78:this.$=b.addLocationDataFn(d[a],d[a])(new b.Param(e[a]));break;case 79:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Param(e[a-1],null,!0)); +break;case 80:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Param(e[a-2],e[a]));break;case 81:case 188:this.$=b.addLocationDataFn(d[a],d[a])(new b.Expansion);break;case 86:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Splat(e[a-1]));break;case 88:this.$=b.addLocationDataFn(d[a-1],d[a])(e[a-1].add(e[a]));break;case 89:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Value(e[a-1],[].concat(e[a])));break;case 99:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Access(e[a]));break;case 100:this.$=b.addLocationDataFn(d[a- +1],d[a])(new b.Access(e[a],"soak"));break;case 101:this.$=b.addLocationDataFn(d[a-1],d[a])([b.addLocationDataFn(d[a-1])(new b.Access(new b.PropertyName("prototype"))),b.addLocationDataFn(d[a])(new b.Access(e[a]))]);break;case 102:this.$=b.addLocationDataFn(d[a-1],d[a])([b.addLocationDataFn(d[a-1])(new 
b.Access(new b.PropertyName("prototype"),"soak")),b.addLocationDataFn(d[a])(new b.Access(e[a]))]);break;case 103:this.$=b.addLocationDataFn(d[a],d[a])(new b.Access(new b.PropertyName("prototype"))); +break;case 106:this.$=b.addLocationDataFn(d[a-1],d[a])(b.extend(e[a],{soak:!0}));break;case 107:this.$=b.addLocationDataFn(d[a],d[a])(new b.Index(e[a]));break;case 108:this.$=b.addLocationDataFn(d[a],d[a])(new b.Slice(e[a]));break;case 109:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Obj(e[a-2],e[a-3].generated));break;case 115:this.$=b.addLocationDataFn(d[a],d[a])(new b.Class);break;case 116:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Class(null,null,e[a]));break;case 117:this.$=b.addLocationDataFn(d[a- +2],d[a])(new b.Class(null,e[a]));break;case 118:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Class(null,e[a-1],e[a]));break;case 119:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Class(e[a]));break;case 120:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Class(e[a-1],null,e[a]));break;case 121:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Class(e[a-2],e[a]));break;case 122:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Class(e[a-3],e[a-1],e[a]));break;case 123:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.ImportDeclaration(null, +e[a]));break;case 124:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.ImportDeclaration(new b.ImportClause(e[a-2],null),e[a]));break;case 125:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.ImportDeclaration(new b.ImportClause(null,e[a-2]),e[a]));break;case 126:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.ImportDeclaration(new b.ImportClause(null,new b.ImportSpecifierList([])),e[a]));break;case 127:this.$=b.addLocationDataFn(d[a-6],d[a])(new b.ImportDeclaration(new b.ImportClause(null,new b.ImportSpecifierList(e[a- +4])),e[a]));break;case 128:this.$=b.addLocationDataFn(d[a-5],d[a])(new b.ImportDeclaration(new b.ImportClause(e[a-4],e[a-2]),e[a]));break;case 129:this.$=b.addLocationDataFn(d[a-8],d[a])(new b.ImportDeclaration(new b.ImportClause(e[a-7],new b.ImportSpecifierList(e[a-4])),e[a]));break;case 133:case 153:case 168:case 184:this.$=b.addLocationDataFn(d[a-3],d[a])(e[a-2]);break;case 135:this.$=b.addLocationDataFn(d[a],d[a])(new b.ImportSpecifier(e[a]));break;case 136:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.ImportSpecifier(e[a- +2],e[a]));break;case 137:this.$=b.addLocationDataFn(d[a],d[a])(new b.ImportSpecifier(new b.Literal(e[a])));break;case 138:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.ImportSpecifier(new b.Literal(e[a-2]),e[a]));break;case 139:this.$=b.addLocationDataFn(d[a],d[a])(new b.ImportDefaultSpecifier(e[a]));break;case 140:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.ImportNamespaceSpecifier(new b.Literal(e[a-2]),e[a]));break;case 141:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.ExportNamedDeclaration(new b.ExportSpecifierList([]))); +break;case 142:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.ExportNamedDeclaration(new b.ExportSpecifierList(e[a-2])));break;case 143:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.ExportNamedDeclaration(e[a]));break;case 144:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.ExportNamedDeclaration(new b.Assign(e[a-2],e[a],null,{moduleDeclaration:"export"})));break;case 145:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.ExportNamedDeclaration(new b.Assign(e[a-3],e[a],null,{moduleDeclaration:"export"}))); +break;case 146:this.$=b.addLocationDataFn(d[a-5],d[a])(new b.ExportNamedDeclaration(new b.Assign(e[a-4],e[a-1],null,{moduleDeclaration:"export"})));break;case 147:this.$=b.addLocationDataFn(d[a-2],d[a])(new 
b.ExportDefaultDeclaration(e[a]));break;case 148:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.ExportAllDeclaration(new b.Literal(e[a-2]),e[a]));break;case 149:this.$=b.addLocationDataFn(d[a-6],d[a])(new b.ExportNamedDeclaration(new b.ExportSpecifierList(e[a-4]),e[a]));break;case 155:this.$=b.addLocationDataFn(d[a], +d[a])(new b.ExportSpecifier(e[a]));break;case 156:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.ExportSpecifier(e[a-2],e[a]));break;case 157:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.ExportSpecifier(e[a-2],new b.Literal(e[a])));break;case 158:this.$=b.addLocationDataFn(d[a],d[a])(new b.ExportSpecifier(new b.Literal(e[a])));break;case 159:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.TaggedTemplateCall(e[a-2],e[a],e[a-1]));break;case 160:case 161:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Call(e[a- +2],e[a],e[a-1]));break;case 163:this.$=b.addLocationDataFn(d[a],d[a])(new b.SuperCall);break;case 164:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.SuperCall(e[a]));break;case 165:this.$=b.addLocationDataFn(d[a],d[a])(!1);break;case 166:this.$=b.addLocationDataFn(d[a],d[a])(!0);break;case 167:this.$=b.addLocationDataFn(d[a-1],d[a])([]);break;case 169:case 170:this.$=b.addLocationDataFn(d[a],d[a])(new b.Value(new b.ThisLiteral));break;case 171:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Value(b.addLocationDataFn(d[a- +1])(new b.ThisLiteral),[b.addLocationDataFn(d[a])(new b.Access(e[a]))],"this"));break;case 172:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Arr([]));break;case 173:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Arr(e[a-2]));break;case 174:this.$=b.addLocationDataFn(d[a],d[a])("inclusive");break;case 175:this.$=b.addLocationDataFn(d[a],d[a])("exclusive");break;case 176:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Range(e[a-3],e[a-1],e[a-2]));break;case 177:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Range(e[a- +2],e[a],e[a-1]));break;case 178:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Range(e[a-1],null,e[a]));break;case 179:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Range(null,e[a],e[a-1]));break;case 180:this.$=b.addLocationDataFn(d[a],d[a])(new b.Range(null,null,e[a]));break;case 190:this.$=b.addLocationDataFn(d[a-2],d[a])([].concat(e[a-2],e[a]));break;case 191:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Try(e[a]));break;case 192:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Try(e[a-1],e[a][0], +e[a][1]));break;case 193:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Try(e[a-2],null,null,e[a]));break;case 194:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Try(e[a-3],e[a-2][0],e[a-2][1],e[a]));break;case 195:this.$=b.addLocationDataFn(d[a-2],d[a])([e[a-1],e[a]]);break;case 196:this.$=b.addLocationDataFn(d[a-2],d[a])([b.addLocationDataFn(d[a-1])(new b.Value(e[a-1])),e[a]]);break;case 197:this.$=b.addLocationDataFn(d[a-1],d[a])([null,e[a]]);break;case 198:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Throw(e[a])); +break;case 199:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Parens(e[a-1]));break;case 200:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Parens(e[a-2]));break;case 201:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.While(e[a]));break;case 202:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.While(e[a-2],{guard:e[a]}));break;case 203:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.While(e[a],{invert:!0}));break;case 204:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.While(e[a-2],{invert:!0,guard:e[a]})); +break;case 205:this.$=b.addLocationDataFn(d[a-1],d[a])(e[a-1].addBody(e[a]));break;case 206:case 
207:this.$=b.addLocationDataFn(d[a-1],d[a])(e[a].addBody(b.addLocationDataFn(d[a-1])(b.Block.wrap([e[a-1]]))));break;case 208:this.$=b.addLocationDataFn(d[a],d[a])(e[a]);break;case 209:this.$=b.addLocationDataFn(d[a-1],d[a])((new b.While(b.addLocationDataFn(d[a-1])(new b.BooleanLiteral("true")))).addBody(e[a]));break;case 210:this.$=b.addLocationDataFn(d[a-1],d[a])((new b.While(b.addLocationDataFn(d[a- +1])(new b.BooleanLiteral("true")))).addBody(b.addLocationDataFn(d[a])(b.Block.wrap([e[a]]))));break;case 211:case 212:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.For(e[a-1],e[a]));break;case 213:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.For(e[a],e[a-1]));break;case 214:this.$=b.addLocationDataFn(d[a-1],d[a])({source:b.addLocationDataFn(d[a])(new b.Value(e[a]))});break;case 215:this.$=b.addLocationDataFn(d[a-3],d[a])({source:b.addLocationDataFn(d[a-2])(new b.Value(e[a-2])),step:e[a]});break; +case 216:b=b.addLocationDataFn(d[a-1],d[a]);e[a].own=e[a-1].own;e[a].ownTag=e[a-1].ownTag;e[a].name=e[a-1][0];e[a].index=e[a-1][1];this.$=b(e[a]);break;case 217:this.$=b.addLocationDataFn(d[a-1],d[a])(e[a]);break;case 218:c=b.addLocationDataFn(d[a-2],d[a]);e[a].own=!0;e[a].ownTag=b.addLocationDataFn(d[a-1])(new b.Literal(e[a-1]));this.$=c(e[a]);break;case 224:this.$=b.addLocationDataFn(d[a-2],d[a])([e[a-2],e[a]]);break;case 225:this.$=b.addLocationDataFn(d[a-1],d[a])({source:e[a]});break;case 226:this.$= +b.addLocationDataFn(d[a-1],d[a])({source:e[a],object:!0});break;case 227:this.$=b.addLocationDataFn(d[a-3],d[a])({source:e[a-2],guard:e[a]});break;case 228:this.$=b.addLocationDataFn(d[a-3],d[a])({source:e[a-2],guard:e[a],object:!0});break;case 229:this.$=b.addLocationDataFn(d[a-3],d[a])({source:e[a-2],step:e[a]});break;case 230:this.$=b.addLocationDataFn(d[a-5],d[a])({source:e[a-4],guard:e[a-2],step:e[a]});break;case 231:this.$=b.addLocationDataFn(d[a-5],d[a])({source:e[a-4],step:e[a-2],guard:e[a]}); +break;case 232:this.$=b.addLocationDataFn(d[a-1],d[a])({source:e[a],from:!0});break;case 233:this.$=b.addLocationDataFn(d[a-3],d[a])({source:e[a-2],guard:e[a],from:!0});break;case 234:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Switch(e[a-3],e[a-1]));break;case 235:this.$=b.addLocationDataFn(d[a-6],d[a])(new b.Switch(e[a-5],e[a-3],e[a-1]));break;case 236:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Switch(null,e[a-1]));break;case 237:this.$=b.addLocationDataFn(d[a-5],d[a])(new b.Switch(null,e[a- +3],e[a-1]));break;case 239:this.$=b.addLocationDataFn(d[a-1],d[a])(e[a-1].concat(e[a]));break;case 240:this.$=b.addLocationDataFn(d[a-2],d[a])([[e[a-1],e[a]]]);break;case 241:this.$=b.addLocationDataFn(d[a-3],d[a])([[e[a-2],e[a-1]]]);break;case 242:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.If(e[a-1],e[a],{type:e[a-2]}));break;case 243:this.$=b.addLocationDataFn(d[a-4],d[a])(e[a-4].addElse(b.addLocationDataFn(d[a-2],d[a])(new b.If(e[a-1],e[a],{type:e[a-2]}))));break;case 245:this.$=b.addLocationDataFn(d[a- +2],d[a])(e[a-2].addElse(e[a]));break;case 246:case 247:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.If(e[a],b.addLocationDataFn(d[a-2])(b.Block.wrap([e[a-2]])),{type:e[a-1],statement:!0}));break;case 250:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Op("-",e[a]));break;case 251:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Op("+",e[a]));break;case 252:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Op("--",e[a]));break;case 253:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Op("++",e[a]));break;case 254:this.$= +b.addLocationDataFn(d[a-1],d[a])(new b.Op("--",e[a-1],null,!0));break;case 
255:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Op("++",e[a-1],null,!0));break;case 256:this.$=b.addLocationDataFn(d[a-1],d[a])(new b.Existence(e[a-1]));break;case 257:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Op("+",e[a-2],e[a]));break;case 258:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Op("-",e[a-2],e[a]));break;case 259:case 260:case 261:case 262:case 263:case 264:case 265:case 266:case 267:case 268:this.$=b.addLocationDataFn(d[a- +2],d[a])(new b.Op(e[a-1],e[a-2],e[a]));break;case 269:d=b.addLocationDataFn(d[a-2],d[a]);e="!"===e[a-1].charAt(0)?(new b.Op(e[a-1].slice(1),e[a-2],e[a])).invert():new b.Op(e[a-1],e[a-2],e[a]);this.$=d(e);break;case 270:this.$=b.addLocationDataFn(d[a-2],d[a])(new b.Assign(e[a-2],e[a],e[a-1]));break;case 271:this.$=b.addLocationDataFn(d[a-4],d[a])(new b.Assign(e[a-4],e[a-1],e[a-3]));break;case 272:this.$=b.addLocationDataFn(d[a-3],d[a])(new b.Assign(e[a-3],e[a],e[a-2]));break;case 273:this.$=b.addLocationDataFn(d[a- +2],d[a])(new b.Extends(e[a-2],e[a]))}},table:[{1:[2,1],3:1,4:2,5:3,7:4,8:5,9:6,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y, +158:V,159:Z,160:K,161:ba,162:R},{1:[3]},{1:[2,2],6:xa},a(ua,[2,3]),a(ua,[2,6],{141:77,132:102,138:103,133:z,135:B,139:C,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(ua,[2,7],{141:77,132:105,138:106,133:z,135:B,139:C,156:ya}),a(ua,[2,8]),a(ta,[2,14],{109:107,78:108,86:114,40:Ja,41:Ja,114:Ja,82:Ta,83:Ga,84:Ua,85:Fa,87:Pa,90:Qa,113:Na}),a(ta,[2,15],{86:114,109:117,78:118,82:Ta,83:Ga,84:Ua,85:Fa,87:Pa,90:Qa,113:Na,114:Ja}),a(ta,[2,16]),a(ta, +[2,17]),a(ta,[2,18]),a(ta,[2,19]),a(ta,[2,20]),a(ta,[2,21]),a(ta,[2,22]),a(ta,[2,23]),a(ta,[2,24]),a(ta,[2,25]),a(ta,[2,26]),a(za,[2,9]),a(za,[2,10]),a(za,[2,11]),a(za,[2,12]),a(za,[2,13]),a([1,6,32,42,131,133,135,139,156,163,164,165,166,167,168,169,170,171,172,173,174],na,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120, +8:122,12:c,28:fa,29:Ha,34:f,38:D,40:l,41:w,44:G,45:F,48:x,49:v,50:M,51:N,52:J,53:r,61:[1,119],62:O,63:m,67:k,68:t,92:p,95:h,97:y,105:P,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,137:W,149:Q,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R}),a(Aa,Ia,{55:[1,124]}),a(Aa,[2,95]),a(Aa,[2,96]),a(Aa,[2,97]),a(Aa,[2,98]),a(b,[2,162]),a([6,31,65,70],n,{64:125,71:126,72:127,33:129,60:130,74:131,75:132,34:f,73:pa,92:p,118:e,119:d}),{30:135,31:va},{7:137,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11, +20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:138,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, 
+25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:139,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70, +34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:140,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w, +43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{15:142,16:143,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:144,60:71,74:53,75:54,77:141,79:28,80:29,81:30,92:p,111:31,112:A,117:H,118:U,119:T, +130:S},{15:142,16:143,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:144,60:71,74:53,75:54,77:145,79:28,80:29,81:30,92:p,111:31,112:A,117:H,118:U,119:T,130:S},a(Ka,wa,{96:[1,149],161:[1,146],162:[1,147],175:[1,148]}),a(ta,[2,244],{151:[1,150]}),{30:151,31:va},{30:152,31:va},a(ta,[2,208]),{30:153,31:va},{7:154,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:[1,155],33:70,34:f,37:55, +38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Gb,[2,115],{47:27,79:28,80:29,81:30,111:31,74:53,75:54,37:55,43:57,33:70,60:71,39:80,15:142,16:143,54:144,30:156,77:158,31:va,34:f,38:D,40:l,41:w,44:G,45:F,48:x,49:v,50:M, +51:N,52:J,53:r,92:p,96:[1,157],112:A,117:H,118:U,119:T,130:S}),{7:159,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y, +158:V,159:Z,160:K,161:ba,162:R},a(za,ab,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:160,12:c,28:fa,34:f,38:D,40:l,41:w,44:G,45:F,48:x,49:v,50:M,51:N,52:J,53:r,61:E,62:O,63:m,67:k,68:t,92:p,95:h,97:y,105:P,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,137:W,149:Q,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R}),a([1,6, 
+31,32,42,70,94,131,133,135,139,156],[2,66]),{33:165,34:f,39:161,40:l,41:w,92:[1,164],98:162,99:163,104:Hb},{25:168,33:169,34:f,92:[1,167],95:h,103:[1,170],107:[1,171]},a(Ka,[2,92]),a(Ka,[2,93]),a(Aa,[2,40]),a(Aa,[2,41]),a(Aa,[2,42]),a(Aa,[2,43]),a(Aa,[2,44]),a(Aa,[2,45]),a(Aa,[2,46]),a(Aa,[2,47]),{4:172,5:3,7:4,8:5,9:6,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,31:[1,173],33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27, +48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:174,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:bb,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J, +53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,73:Wa,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,116:176,117:H,118:U,119:T,120:Ib,123:177,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Aa,[2,169]),a(Aa,[2,170],{35:181,36:Oa}),a([1,6,31,32,42,46,65,70,73,82,83,84,85,87,89,90,94,113,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],[2,163], +{110:183,114:ub}),{31:[2,69]},{31:[2,70]},a(La,[2,87]),a(La,[2,90]),{7:185,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X, +157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:186,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba, +162:R},{7:187,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:189,8:122,10:20,11:21, +12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,30:188,31:va,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{33:194,34:f,60:195,74:196,75:197,80:190, 
+92:p,118:e,119:T,143:191,144:[1,192],145:193},{142:198,146:[1,199],147:[1,200],148:[1,201]},a([6,31,70,94],Jb,{39:80,93:202,56:203,57:204,59:205,11:206,37:207,33:208,35:209,60:210,34:f,36:Oa,38:D,40:l,41:w,62:O,118:e}),a(Kb,[2,34]),a(Kb,[2,35]),a(Aa,[2,38]),{15:142,16:211,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:144,60:71,74:53,75:54,77:212,79:28,80:29,81:30,92:p,111:31,112:A,117:H,118:U,119:T,130:S},a([1,6,29,31,32,40,41,42,55,58,65,70,73,82,83, +84,85,87,89,90,94,96,102,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],[2,32]),a(Lb,[2,36]),{4:213,5:3,7:4,8:5,9:6,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31, +112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(ua,[2,5],{7:4,8:5,9:6,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,5:214,12:c,28:u,34:f,38:D,40:l,41:w,44:G,45:F,48:x,49:v,50:M,51:N,52:J,53:r,61:E,62:O,63:m,67:k,68:t, +92:p,95:h,97:y,105:P,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,133:z,135:B,137:W,139:C,149:Q,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R}),a(ta,[2,256]),{7:215,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I, +129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:216,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B, +136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:217,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77, +149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:218,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V, 
+159:Z,160:K,161:ba,162:R},{7:219,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:220, +8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:221,8:122,10:20,11:21,12:c,13:23, +14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:222,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11, +20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:223,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, +25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:224,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70, +34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:225,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w, +43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:226,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v, 
+50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:227,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71, +61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:228,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t, +74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(ta,[2,207]),a(ta,[2,212]),{7:229,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53, +75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(ta,[2,206]),a(ta,[2,211]),{39:230,40:l,41:w,110:231,114:ub},a(La,[2,88]),a(Mb,[2,166]),{35:232,36:Oa},{35:233,36:Oa},a(La,[2,103],{35:234,36:Oa}),{35:235,36:Oa},a(La,[2,104]),{7:237,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19, +28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,73:Nb,74:53,75:54,77:40,79:28,80:29,81:30,88:236,91:238,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,121:239,122:vb,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{86:242,87:Pa,90:Qa},{110:243,114:ub},a(La,[2,89]),a(ua,[2,65],{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15, +24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:244,12:c,28:fa,34:f,38:D,40:l,41:w,44:G,45:F,48:x,49:v,50:M,51:N,52:J,53:r,61:E,62:O,63:m,67:k,68:t,92:p,95:h,97:y,105:P,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,133:ab,135:ab,139:ab,156:ab,137:W,149:Q,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R}),a(Ma,[2,28],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha, +166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{7:245,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y, 
+158:V,159:Z,160:K,161:ba,162:R},{132:105,133:z,135:B,138:106,139:C,141:77,156:ya},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,163,164,165,166,167,168,169,170,171,172,173,174],na,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120,8:122,12:c,28:fa,29:Ha,34:f,38:D,40:l,41:w,44:G,45:F,48:x,49:v,50:M, +51:N,52:J,53:r,61:E,62:O,63:m,67:k,68:t,92:p,95:h,97:y,105:P,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,137:W,149:Q,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R}),{6:[1,247],7:246,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:[1,248],33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31, +112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a([6,31],Ea,{69:251,65:[1,249],70:Ob}),a(Va,[2,74]),a(Va,[2,78],{55:[1,253],73:[1,252]}),a(Va,[2,81]),a(hb,[2,82]),a(hb,[2,83]),a(hb,[2,84]),a(hb,[2,85]),{35:181,36:Oa},{7:254,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:bb,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F, +47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,73:Wa,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,116:176,117:H,118:U,119:T,120:Ib,123:177,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(ta,[2,68]),{4:256,5:3,7:4,8:5,9:6,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,32:[1,255],33:70,34:f,37:55, +38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,164,165,166,167,168,169,170,171,172,173,174],[2,248],{141:77,132:102,138:103,163:ea}),a(cb, +[2,249],{141:77,132:102,138:103,163:ea,165:ha}),a(cb,[2,250],{141:77,132:102,138:103,163:ea,165:ha}),a(cb,[2,251],{141:77,132:102,138:103,163:ea,165:ha}),a(ta,[2,252],{40:wa,41:wa,82:wa,83:wa,84:wa,85:wa,87:wa,90:wa,113:wa,114:wa}),a(Mb,Ja,{109:107,78:108,86:114,82:Ta,83:Ga,84:Ua,85:Fa,87:Pa,90:Qa,113:Na}),{78:118,82:Ta,83:Ga,84:Ua,85:Fa,86:114,87:Pa,90:Qa,109:117,113:Na,114:Ja},a(Pb,Ia),a(ta,[2,253],{40:wa,41:wa,82:wa,83:wa,84:wa,85:wa,87:wa,90:wa,113:wa,114:wa}),a(ta,[2,254]),a(ta,[2,255]),{6:[1, +259],7:257,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:[1,258],33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:260,8:122,10:20, 
+11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{30:261,31:va,155:[1,262]},a(ta,[2,191],{126:263, +127:[1,264],128:[1,265]}),a(ta,[2,205]),a(ta,[2,213]),{31:[1,266],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},{150:267,152:268,153:ib},a(ta,[2,116]),{7:270,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k, +68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Gb,[2,119],{30:271,31:va,40:wa,41:wa,82:wa,83:wa,84:wa,85:wa,87:wa,90:wa,113:wa,114:wa,96:[1,272]}),a(Ma,[2,198],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(za,db,{141:77,132:102,138:103,159:ka,160:ca, +163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(za,[2,123]),{29:[1,273],70:[1,274]},{29:[1,275]},{31:jb,33:280,34:f,94:[1,276],100:277,101:278,103:Ya},a([29,70],[2,139]),{102:[1,282]},{31:wb,33:287,34:f,94:[1,283],103:eb,106:284,108:285},a(za,[2,143]),{55:[1,289]},{7:290,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M, +51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{29:[1,291]},{6:xa,131:[1,292]},{4:293,5:3,7:4,8:5,9:6,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x, +49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a([6,31,70,120],Qb,{141:77,132:102,138:103,121:294,73:[1,295],122:vb,133:z,135:B,139:C,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(xb,[2,172]),a([6,31,120], +Ea,{69:296,70:kb}),a(Ra,[2,181]),{7:254,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:bb,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,73:Wa,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,116:298,117:H,118:U,119:T,123:177,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X, 
+157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Ra,[2,187]),a(Ra,[2,188]),a(Rb,[2,171]),a(Rb,[2,33]),a(b,[2,164]),{7:254,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:bb,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,73:Wa,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,115:[1,299],116:300,117:H,118:U,119:T,123:177,125:I, +129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{30:301,31:va,132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},a(Sb,[2,201],{141:77,132:102,138:103,133:z,134:[1,302],135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Sb,[2,203],{141:77,132:102,138:103,133:z,134:[1,303], +135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(ta,[2,209]),a(Za,[2,210],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],[2,214],{140:[1,304]}),a(lb,[2,217]),{33:194,34:f,60:195,74:196,75:197,92:p,118:e,119:d,143:305,145:193}, +a(lb,[2,223],{70:[1,306]}),a(mb,[2,219]),a(mb,[2,220]),a(mb,[2,221]),a(mb,[2,222]),a(ta,[2,216]),{7:307,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C, +141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:308,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y, +158:V,159:Z,160:K,161:ba,162:R},{7:309,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R}, +a(nb,Ea,{69:310,70:Tb}),a(Ba,[2,111]),a(Ba,[2,51],{58:[1,312]}),a(Ub,[2,60],{55:[1,313]}),a(Ba,[2,56]),a(Ub,[2,61]),a(yb,[2,57]),a(yb,[2,58]),a(yb,[2,59]),{46:[1,314],78:118,82:Ta,83:Ga,84:Ua,85:Fa,86:114,87:Pa,90:Qa,109:117,113:Na,114:Ja},a(Pb,wa),{6:xa,42:[1,315]},a(ua,[2,4]),a(Vb,[2,257],{141:77,132:102,138:103,163:ea,164:ga,165:ha}),a(Vb,[2,258],{141:77,132:102,138:103,163:ea,164:ga,165:ha}),a(cb,[2,259],{141:77,132:102,138:103,163:ea,165:ha}),a(cb,[2,260],{141:77,132:102,138:103,163:ea,165:ha}), 
+a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,166,167,168,169,170,171,172,173,174],[2,261],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173],[2,262],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,168,169,170,171,172,173],[2,263],{141:77,132:102,138:103,159:ka,160:ca, +163:ea,164:ga,165:ha,166:la,167:oa,174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,169,170,171,172,173],[2,264],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,170,171,172,173],[2,265],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,171,172, +173],[2,266],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,172,173],[2,267],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,173],[2,268],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma, +174:ia}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173,174],[2,269],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la}),a(Za,[2,247],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Za,[2,246],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(b, +[2,159]),a(b,[2,160]),a(La,[2,99]),a(La,[2,100]),a(La,[2,101]),a(La,[2,102]),{89:[1,316]},{73:Nb,89:[2,107],121:317,122:vb,132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},{89:[2,108]},{7:318,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26, +60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,180],92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Wb,[2,174]),a(Wb,Xb),a(La,[2,106]),a(b,[2,161]),a(ua,[2,64],{141:77,132:102,138:103,133:db,135:db,139:db,156:db,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ma,[2,29],{141:77,132:102, +138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ma,[2,48],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{7:319,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53, 
+75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:320,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30, +92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{66:321,67:k,68:t},a(Sa,fb,{72:127,33:129,60:130,74:131,75:132,71:322,34:f,73:pa,92:p,118:e,119:d}),{6:Yb,31:Zb},a(Va,[2,79]),{7:325,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M, +51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Ra,Qb,{141:77,132:102,138:103,73:[1,326],133:z,135:B,139:C,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a($b,[2,30]),{6:xa,32:[1,327]},a(Ma,[2,270],{141:77,132:102, +138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{7:328,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W, +138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:329,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41, +155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Ma,[2,273],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(ta,[2,245]),{7:330,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y, +105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(ta,[2,192],{127:[1,331]}),{30:332,31:va},{30:335,31:va,33:333,34:f,75:334,92:p},{150:336,152:268,153:ib},{32:[1,337],151:[1,338],152:339,153:ib},a(ob,[2,238]),{7:341,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F, 
+47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,124:340,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(ac,[2,117],{141:77,132:102,138:103,30:342,31:va,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(ta,[2,120]),{7:343,8:122,10:20, +11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{39:344,40:l,41:w},{92:[1,346],99:345,104:Hb},{39:347, +40:l,41:w},{29:[1,348]},a(nb,Ea,{69:349,70:pb}),a(Ba,[2,130]),{31:jb,33:280,34:f,100:351,101:278,103:Ya},a(Ba,[2,135],{102:[1,352]}),a(Ba,[2,137],{102:[1,353]}),{33:354,34:f},a(za,[2,141]),a(nb,Ea,{69:355,70:zb}),a(Ba,[2,150]),{31:wb,33:287,34:f,103:eb,106:357,108:285},a(Ba,[2,155],{102:[1,358]}),a(Ba,[2,158]),{6:[1,360],7:359,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:[1,361],33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G, +45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Ab,[2,147],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{39:362,40:l,41:w},a(Aa,[2,199]),{6:xa,32:[1,363]}, +{7:364,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a([12,28,34,38,40,41,44,45,48, +49,50,51,52,53,61,62,63,67,68,92,95,97,105,112,117,118,119,125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Xb,{6:gb,31:gb,70:gb,120:gb}),{6:qb,31:rb,120:[1,365]},a([6,31,32,115,120],fb,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,76:179,7:254,123:368,12:c,28:fa,34:f,38:D,40:l,41:w,44:G,45:F,48:x,49:v, +50:M,51:N,52:J,53:r,61:E,62:O,63:m,67:k,68:t,73:Wa,92:p,95:h,97:y,105:P,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,133:z,135:B,137:W,139:C,149:Q,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R}),a(Sa,Ea,{69:369,70:kb}),a(b,[2,167]),a([6,31,115],Ea,{69:370,70:kb}),a(bc,[2,242]),{7:371,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E, 
+62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:372,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53, +75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:373,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30, +92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(lb,[2,218]),{33:194,34:f,60:195,74:196,75:197,92:p,118:e,119:d,145:374},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,156],[2,225],{141:77,132:102,138:103,134:[1,375],140:[1,376],159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Bb,[2,226],{141:77,132:102, +138:103,134:[1,377],159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Bb,[2,232],{141:77,132:102,138:103,134:[1,378],159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{6:cc,31:dc,94:[1,379]},a(Cb,fb,{39:80,57:204,59:205,11:206,37:207,33:208,35:209,60:210,56:382,34:f,36:Oa,38:D,40:l,41:w,62:O,118:e}),{7:383,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17, +26:18,27:19,28:fa,31:[1,384],33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:385,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa, +31:[1,386],33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Aa,[2,39]),a(Lb,[2,37]),a(La,[2,105]),{7:387,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17, +26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,178],92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{89:[2,179],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra, 
+171:da,172:ma,173:sa,174:ia},a(Ma,[2,49],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{32:[1,388],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},{30:389,31:va},a(Va,[2,75]),{33:129,34:f,60:130,71:390,72:127,73:pa,74:131,75:132,92:p,118:e,119:d},a(ec,n,{71:126,72:127,33:129,60:130,74:131,75:132,64:391,34:f,73:pa,92:p,118:e, +119:d}),a(Va,[2,80],{141:77,132:102,138:103,133:z,135:B,139:C,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ra,gb),a($b,[2,31]),{32:[1,392],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},a(Ma,[2,272],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{30:393,31:va,132:102, +133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},{30:394,31:va},a(ta,[2,193]),{30:395,31:va},{30:396,31:va},a(Db,[2,197]),{32:[1,397],151:[1,398],152:339,153:ib},a(ta,[2,236]),{30:399,31:va},a(ob,[2,239]),{30:400,31:va,70:[1,401]},a(fc,[2,189],{141:77,132:102,138:103,133:z,135:B,139:C,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(ta,[2,118]),a(ac,[2, +121],{141:77,132:102,138:103,30:402,31:va,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(za,[2,124]),{29:[1,403]},{31:jb,33:280,34:f,100:404,101:278,103:Ya},a(za,[2,125]),{39:405,40:l,41:w},{6:sb,31:tb,94:[1,406]},a(Cb,fb,{33:280,101:409,34:f,103:Ya}),a(Sa,Ea,{69:410,70:pb}),{33:411,34:f},{33:412,34:f},{29:[2,140]},{6:Eb,31:Fb,94:[1,413]},a(Cb,fb,{33:287,108:416,34:f,103:eb}),a(Sa,Ea,{69:417,70:zb}),{33:418,34:f,103:[1,419]}, +a(Ab,[2,144],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{7:420,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I, +129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:421,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B, +136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(za,[2,148]),{131:[1,422]},{120:[1,423],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},a(xb,[2,173]),{7:254,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r, 
+54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,73:Wa,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,123:424,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:254,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,31:bb,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26, +60:71,61:E,62:O,63:m,66:33,67:k,68:t,73:Wa,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,116:425,117:H,118:U,119:T,123:177,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Ra,[2,182]),{6:qb,31:rb,32:[1,426]},{6:qb,31:rb,115:[1,427]},a(Za,[2,202],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Za, +[2,204],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Za,[2,215],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(lb,[2,224]),{7:428,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x, +49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:429,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26, +60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:430,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k, +68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:431,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28, +80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(xb,[2,109]),{11:206,33:208,34:f,35:209,36:Oa,37:207,38:D,39:80,40:l,41:w,56:432,57:204,59:205,60:210,62:O,118:e},a(ec,Jb,{39:80,56:203,57:204,59:205,11:206,37:207,33:208,35:209,60:210,93:433,34:f,36:Oa,38:D,40:l,41:w,62:O,118:e}),a(Ba,[2,112]),a(Ba,[2,52],{141:77,132:102,138:103,133:z,135:B,139:C,156:qa,159:ka, 
+160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{7:434,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77, +149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},a(Ba,[2,54],{141:77,132:102,138:103,133:z,135:B,139:C,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{7:435,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29, +81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{89:[2,177],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},a(ta,[2,50]),a(ta,[2,67]),a(Va,[2,76]),a(Sa,Ea,{69:436,70:Ob}),a(ta,[2,271]),a(bc,[2,243]),a(ta,[2,194]),a(Db,[2,195]),a(Db,[2,196]),a(ta,[2,234]),{30:437,31:va}, +{32:[1,438]},a(ob,[2,240],{6:[1,439]}),{7:440,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba, +162:R},a(ta,[2,122]),{39:441,40:l,41:w},a(nb,Ea,{69:442,70:pb}),a(za,[2,126]),{29:[1,443]},{33:280,34:f,101:444,103:Ya},{31:jb,33:280,34:f,100:445,101:278,103:Ya},a(Ba,[2,131]),{6:sb,31:tb,32:[1,446]},a(Ba,[2,136]),a(Ba,[2,138]),a(za,[2,142],{29:[1,447]}),{33:287,34:f,103:eb,108:448},{31:wb,33:287,34:f,103:eb,106:449,108:285},a(Ba,[2,151]),{6:Eb,31:Fb,32:[1,450]},a(Ba,[2,156]),a(Ba,[2,157]),a(Ab,[2,145],{141:77,132:102,138:103,133:z,135:B,139:C,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L, +169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),{32:[1,451],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},a(Aa,[2,200]),a(Aa,[2,176]),a(Ra,[2,183]),a(Sa,Ea,{69:452,70:kb}),a(Ra,[2,184]),a(b,[2,168]),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,156],[2,227],{141:77,132:102,138:103,140:[1,453],159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}), +a(Bb,[2,229],{141:77,132:102,138:103,134:[1,454],159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ma,[2,228],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ma,[2,233],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ba,[2,113]),a(Sa,Ea,{69:455,70:Tb}),{32:[1,456],132:102,133:z,135:B,138:103,139:C, 
+141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},{32:[1,457],132:102,133:z,135:B,138:103,139:C,141:77,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia},{6:Yb,31:Zb,32:[1,458]},{32:[1,459]},a(ta,[2,237]),a(ob,[2,241]),a(fc,[2,190],{141:77,132:102,138:103,133:z,135:B,139:C,156:qa,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(za, +[2,128]),{6:sb,31:tb,94:[1,460]},{39:461,40:l,41:w},a(Ba,[2,132]),a(Sa,Ea,{69:462,70:pb}),a(Ba,[2,133]),{39:463,40:l,41:w},a(Ba,[2,152]),a(Sa,Ea,{69:464,70:zb}),a(Ba,[2,153]),a(za,[2,146]),{6:qb,31:rb,32:[1,465]},{7:466,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30, +92:p,95:h,97:y,105:P,111:31,112:A,117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{7:467,8:122,10:20,11:21,12:c,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:fa,33:70,34:f,37:55,38:D,39:80,40:l,41:w,43:57,44:G,45:F,47:27,48:x,49:v,50:M,51:N,52:J,53:r,54:26,60:71,61:E,62:O,63:m,66:33,67:k,68:t,74:53,75:54,77:40,79:28,80:29,81:30,92:p,95:h,97:y,105:P,111:31,112:A, +117:H,118:U,119:T,125:I,129:aa,130:S,132:43,133:z,135:B,136:44,137:W,138:45,139:C,141:77,149:Q,154:41,155:X,157:Y,158:V,159:Z,160:K,161:ba,162:R},{6:cc,31:dc,32:[1,468]},a(Ba,[2,53]),a(Ba,[2,55]),a(Va,[2,77]),a(ta,[2,235]),{29:[1,469]},a(za,[2,127]),{6:sb,31:tb,32:[1,470]},a(za,[2,149]),{6:Eb,31:Fb,32:[1,471]},a(Ra,[2,185]),a(Ma,[2,230],{141:77,132:102,138:103,159:ka,160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ma,[2,231],{141:77,132:102,138:103,159:ka, +160:ca,163:ea,164:ga,165:ha,166:la,167:oa,168:L,169:ja,170:ra,171:da,172:ma,173:sa,174:ia}),a(Ba,[2,114]),{39:472,40:l,41:w},a(Ba,[2,134]),a(Ba,[2,154]),a(za,[2,129])],defaultActions:{68:[2,69],69:[2,70],238:[2,108],354:[2,140]},parseError:function(a,b){if(b.recoverable)this.trace(a);else{var d=function(a,b){this.message=a;this.hash=b};d.prototype=Error;throw new d(a,b);}},parse:function(a){var b=[0],d=[null],e=[],n=this.table,pa="",c=0,f=0,g=0,h=e.slice.call(arguments,1),va=Object.create(this.lexer), +k={},Ka;for(Ka in this.yy)Object.prototype.hasOwnProperty.call(this.yy,Ka)&&(k[Ka]=this.yy[Ka]);va.setInput(a,k);k.lexer=va;k.parser=this;"undefined"==typeof va.yylloc&&(va.yylloc={});Ka=va.yylloc;e.push(Ka);var m=va.options&&va.options.ranges;this.parseError="function"===typeof k.parseError?k.parseError:Object.getPrototypeOf(this).parseError;for(var t,p,Ia,l,r={},y,q;;){Ia=b[b.length-1];if(this.defaultActions[Ia])l=this.defaultActions[Ia];else{if(null===t||"undefined"==typeof t)t=va.lex()||1,"number"!== +typeof t&&(t=this.symbols_[t]||t);l=n[Ia]&&n[Ia][t]}if("undefined"===typeof l||!l.length||!l[0]){var wa;q=[];for(y in n[Ia])this.terminals_[y]&&2=h?this.wrapInBraces(n):n};b.prototype.compileRoot=function(a){var b,e,d,n,c;a.indent=a.bare?"":ca;a.level=A;this.spaced= +!0;a.scope=new K(null,this,null,null!=(d=a.referencedVars)?d:[]);c=a.locals||[];d=0;for(e=c.length;d=y?this.wrapInBraces(b):b};return b}(S);g.StringLiteral=$a=function(a){function b(){return 
b.__super__.constructor.apply(this,arguments)}na(b,a);return b}(H);g.RegexLiteral=Y=function(a){function b(){return b.__super__.constructor.apply(this,arguments)} +na(b,a);return b}(H);g.PassthroughLiteral=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}na(b,a);return b}(H);g.IdentifierLiteral=r=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}na(b,a);b.prototype.isAssignable=ja;return b}(H);g.PropertyName=Q=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}na(b,a);b.prototype.isAssignable=ja;return b}(H);g.StatementLiteral=ya=function(a){function b(){return b.__super__.constructor.apply(this, +arguments)}na(b,a);b.prototype.isStatement=ja;b.prototype.makeReturn=ea;b.prototype.jumps=function(a){if("break"===this.value&&!(null!=a&&a.loop||null!=a&&a.block)||"continue"===this.value&&(null==a||!a.loop))return this};b.prototype.compileNode=function(a){return[this.makeCode(""+this.tab+this.value+";")]};return b}(H);g.ThisLiteral=ga=function(a){function b(){b.__super__.constructor.call(this,"this")}na(b,a);b.prototype.compileNode=function(a){var b;a=null!=(b=a.scope.method)&&b.bound?a.scope.method.context: +this.value;return[this.makeCode(a)]};return b}(H);g.UndefinedLiteral=oa=function(a){function b(){b.__super__.constructor.call(this,"undefined")}na(b,a);b.prototype.compileNode=function(a){return[this.makeCode(a.level>=t?"(void 0)":"void 0")]};return b}(H);g.NullLiteral=aa=function(a){function b(){b.__super__.constructor.call(this,"null")}na(b,a);return b}(H);g.BooleanLiteral=Ca=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}na(b,a);return b}(H);g.Return=V=function(a){function b(a){this.expression= +a}na(b,a);b.prototype.children=["expression"];b.prototype.isStatement=ja;b.prototype.makeReturn=ea;b.prototype.jumps=ea;b.prototype.compileToFragments=function(a,pa){var e,d;e=null!=(d=this.expression)?d.makeReturn():void 0;return!e||e instanceof b?b.__super__.compileToFragments.call(this,a,pa):e.compileToFragments(a,pa)};b.prototype.compileNode=function(a){var b;b=[];b.push(this.makeCode(this.tab+("return"+(this.expression?" 
":""))));this.expression&&(b=b.concat(this.expression.compileToFragments(a, +P)));b.push(this.makeCode(";"));return b};return b}(a);g.YieldReturn=ra=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}na(b,a);b.prototype.compileNode=function(a){null==a.scope.parent&&this.error("yield can only occur inside functions");return b.__super__.compileNode.apply(this,arguments)};return b}(V);g.Value=L=function(a){function b(a,pa,e){if(!pa&&a instanceof b)return a;this.base=a;this.properties=pa||[];e&&(this[e]=!0);return this}na(b,a);b.prototype.children=["base", +"properties"];b.prototype.add=function(a){this.properties=this.properties.concat(a);return this};b.prototype.hasProperties=function(){return!!this.properties.length};b.prototype.bareLiteral=function(a){return!this.properties.length&&this.base instanceof a};b.prototype.isArray=function(){return this.bareLiteral(qa)};b.prototype.isRange=function(){return this.bareLiteral(X)};b.prototype.isComplex=function(){return this.hasProperties()||this.base.isComplex()};b.prototype.isAssignable=function(){return this.hasProperties()|| +this.base.isAssignable()};b.prototype.isNumber=function(){return this.bareLiteral(S)};b.prototype.isString=function(){return this.bareLiteral($a)};b.prototype.isRegex=function(){return this.bareLiteral(Y)};b.prototype.isUndefined=function(){return this.bareLiteral(oa)};b.prototype.isNull=function(){return this.bareLiteral(aa)};b.prototype.isBoolean=function(){return this.bareLiteral(Ca)};b.prototype.isAtomic=function(){var a,b,e,d;d=this.properties.concat(this.base);a=0;for(b=d.length;athis.properties.length&&!this.base.isComplex()&&(null==d||!d.isComplex()))return[this,this];n=new b(this.base,this.properties.slice(0,-1));n.isComplex()&&(e=new r(a.scope.freeVariable("base")),n=new b(new C(new q(e,n))));if(!d)return[n,e];d.isComplex()&&(c=new r(a.scope.freeVariable("name")),d=new m(new q(c,d.index)),c=new m(c));return[n.add(d),new b(e||n.base,[c||d])]};b.prototype.compileNode=function(a){var b,e,d,n,c;this.base.front=this.front;c=this.properties; +b=this.base.compileToFragments(a,c.length?t:null);c.length&&Z.test(Da(b))&&b.push(this.makeCode("."));e=0;for(d=c.length;e=Math.abs(this.fromNum-this.toNum))return b=function(){h=[];for(var a=f=this.fromNum,b=this.toNum;f<=b?a<=b:a>=b;f<=b?a++:a--)h.push(a);return h}.apply(this),this.exclusive&& +b.pop(),[this.makeCode("["+b.join(", ")+"]")];c=this.tab+ca;d=a.scope.freeVariable("i",{single:!0});g=a.scope.freeVariable("results");n="\n"+c+g+" \x3d [];";e?(a.index=d,e=Da(this.compileNode(a))):(k=d+" \x3d "+this.fromC+(this.toC!==this.toVar?", "+this.toC:""),e=this.fromVar+" \x3c\x3d "+this.toVar,e="var "+k+"; "+e+" ? "+d+" \x3c"+this.equals+" "+this.toVar+" : "+d+" \x3e"+this.equals+" "+this.toVar+"; "+e+" ? 
"+d+"++ : "+d+"--");d="{ "+g+".push("+d+"); }\n"+c+"return "+g+";\n"+a.indent;a=function(a){return null!= +a?a.contains(Ja):void 0};if(a(this.from)||a(this.to))b=", arguments";return[this.makeCode("(function() {"+n+"\n"+c+"for ("+e+")"+d+"}).apply(this"+(null!=b?b:"")+")")]};return b}(a);g.Slice=ba=function(a){function b(a){this.range=a;b.__super__.constructor.call(this)}na(b,a);b.prototype.children=["range"];b.prototype.compileNode=function(a){var b,e,d,c,n;b=this.range;c=b.to;d=(b=b.from)&&b.compileToFragments(a,P)||[this.makeCode("0")];c&&(b=c.compileToFragments(a,P),e=Da(b),this.range.exclusive||-1!== ++e)&&(n=", "+(this.range.exclusive?e:c.isNumber()?""+(+e+1):(b=c.compileToFragments(a,t),"+"+Da(b)+" + 1 || 9e9")));return[this.makeCode(".slice("+Da(d)+(n||"")+")")]};return b}(a);g.Obj=z=function(a){function b(a,b){this.generated=null!=b?b:!1;this.objects=this.properties=a||[]}na(b,a);b.prototype.children=["properties"];b.prototype.compileNode=function(a){var b,e,d,c,n,f,g,h,k,m,t,l,p;p=this.properties;if(this.generated)for(e=0,b=p.length;e=y?this.wrapInBraces(e):e;q=v[0];1===u&&q instanceof x&&q.error("Destructuring assignment has no target");n=this.variable.isObject();if(w&&1===u&&!(q instanceof R))return d=null,q instanceof b&&"object"===q.context?(e=q,f=e.variable,g=f.base,q=e.value,q instanceof b&&(d=q.value,q=q.variable)):(q instanceof b&&(d=q.value,q=q.variable),g=n?q["this"]?q.properties[0].name:new Q(q.unwrap().value):new S(0)),c=g.unwrap()instanceof Q,l=new L(l),l.properties.push(new (c?ua:m)(g)),(p=Ga(q.unwrap().value))&& +q.error(p),d&&(l=new B("?",l,d)),(new b(q,l,null,{param:this.param})).compileToFragments(a,A);C=l.compileToFragments(a,h);z=Da(C);e=[];f=!1;l.unwrap()instanceof r&&!this.variable.assigns(z)||(e.push([this.makeCode((d=a.scope.freeVariable("ref"))+" \x3d ")].concat(Aa.call(C))),C=[this.makeCode(d)],z=d);d=l=0;for(t=v.length;lA?this.wrapInBraces(b):b};return b}(a);g.Code=l=function(a){function b(a,b,e){this.params=a||[];this.body=b||new c;this.bound="boundfunc"===e;this.isGenerator=!!this.body.contains(function(a){return a instanceof B&&a.isYield()||a instanceof ra})}na(b,a);b.prototype.children=["params","body"]; +b.prototype.isStatement=function(){return!!this.ctor};b.prototype.jumps=I;b.prototype.makeScope=function(a){return new K(a,this.body,this)};b.prototype.compileNode=function(a){var g,e,d,n,h,k,m,l,p,y,A,u,v;this.bound&&null!=(e=a.scope.method)&&e.bound&&(this.context=a.scope.method.context);if(this.bound&&!this.context)return this.context="_this",e=new b([new W(new r(this.context))],new c([this])),e=new f(e,[new ga]),e.updateLocationDataIfMissing(this.locationData),e.compileNode(a);a.scope=ma(a,"classScope")|| +this.makeScope(a.scope);a.scope.shared=ma(a,"sharedScope");a.indent+=ca;delete a.bare;delete a.isExistentialEquals;e=[];g=[];l=this.params;n=0;for(k=l.length;n=t?this.wrapInBraces(g):g};b.prototype.eachParamName=function(a){var b,e,d,c,f;c=this.params;f=[];b=0;for(e=c.length;b=c.length)return[];if(1===c.length)return d=c[0],c=d.compileToFragments(a,h),e?c:[].concat(d.makeCode(za("slice",a)+".call("),c,d.makeCode(")"));e=c.slice(n);g=k=0;for(m=e.length;k=t)return(new C(this)).compileToFragments(a);c="+"===d||"-"===d;("new"===d||"typeof"===d||"delete"===d||c&&this.first instanceof b&&this.first.operator=== +d)&&e.push([this.makeCode(" ")]);if(c&&this.first instanceof b||"new"===d&&this.first.isStatement(a))this.first=new C(this.first);e.push(this.first.compileToFragments(a,y));this.flip&&e.reverse();return 
this.joinFragmentArrays(e,"")};b.prototype.compileYield=function(a){var b,e,c;e=[];b=this.operator;null==a.scope.parent&&this.error("yield can only occur inside functions");0<=Ha.call(Object.keys(this.first),"expression")&&!(this.first instanceof ha)?null!=this.first.expression&&e.push(this.first.expression.compileToFragments(a, +y)):(a.level>=P&&e.push([this.makeCode("(")]),e.push([this.makeCode(b)]),""!==(null!=(c=this.first.base)?c.value:void 0)&&e.push([this.makeCode(" ")]),e.push(this.first.compileToFragments(a,y)),a.level>=P&&e.push([this.makeCode(")")]));return this.joinFragmentArrays(e,"")};b.prototype.compilePower=function(a){var b;b=new L(new r("Math"),[new ua(new Q("pow"))]);return(new f(b,[this.first,this.second])).compileToFragments(a)};b.prototype.compileFloorDivision=function(a){var d,e;e=new L(new r("Math"), +[new ua(new Q("floor"))]);d=this.second.isComplex()?new C(this.second):this.second;d=new b("/",this.first,d);return(new f(e,[d])).compileToFragments(a)};b.prototype.compileModulo=function(a){var b;b=new L(new H(za("modulo",a)));return(new f(b,[this.first,this.second])).compileToFragments(a)};b.prototype.toString=function(a){return b.__super__.toString.call(this,a,this.constructor.name+" "+this.operator)};return b}(a);g.In=O=function(a){function b(a,b){this.object=a;this.array=b}na(b,a);b.prototype.children= +["object","array"];b.prototype.invert=T;b.prototype.compileNode=function(a){var b,e,d,c,f;if(this.array instanceof L&&this.array.isArray()&&this.array.base.objects.length){f=this.array.base.objects;e=0;for(d=f.length;ex,this.step&&null!=x&&m||(e=d.freeVariable("len")),f=""+v+p+" \x3d 0, "+e+" \x3d "+I+".length",g=""+v+p+" \x3d "+I+".length - 1",e=p+" \x3c "+e,d=p+" \x3e\x3d 0",this.step?(null!=x?m&&(e=d,f=g):(e=D+" \x3e 0 ? "+e+" : "+d,f="("+D+" \x3e 0 ? 
("+f+") : "+g+")"),p=p+" +\x3d "+D):p=""+(y!==p?"++"+p:p+"++"),f=[this.makeCode(f+"; "+e+"; "+v+p)]));this.returns&&(z=""+this.tab+n+" \x3d [];\n",B="\n"+this.tab+"return "+n+";",b.makeReturn(n)); +this.guard&&(1=p?this.wrapInBraces(b):b};b.prototype.unfoldSoak=function(){return this.soak&&this};return b}(a);la={extend:function(a){return"function(child, parent) { for (var key in parent) { if ("+za("hasProp",a)+".call(parent, key)) child[key] \x3d parent[key]; } function ctor() { this.constructor \x3d child; } ctor.prototype \x3d parent.prototype; child.prototype \x3d new ctor(); child.__super__ \x3d parent.prototype; return child; }"}, +bind:function(){return"function(fn, me){ return function(){ return fn.apply(me, arguments); }; }"},indexOf:function(){return"[].indexOf || function(item) { for (var i \x3d 0, l \x3d this.length; i \x3c l; i++) { if (i in this \x26\x26 this[i] \x3d\x3d\x3d item) return i; } return -1; }"},modulo:function(){return"function(a, b) { return (+a % (b \x3d +b) + b) % b; }"},hasProp:function(){return"{}.hasOwnProperty"},slice:function(){return"[].slice"}};A=1;P=2;h=3;p=4;y=5;t=6;ca=" ";Z=/^[+-]?\d+$/;za= +function(a,b){var c,f;f=b.scope.root;if(a in f.utilities)return f.utilities[a];c=f.freeVariable(a);f.assign(c,la[a](b));return f.utilities[a]=c};Pa=function(a,b){a=a.replace(/\n/g,"$\x26"+b);return a.replace(/\s+$/,"")};Ja=function(a){return a instanceof r&&"arguments"===a.value};Ta=function(a){return a instanceof ga||a instanceof l&&a.bound||a instanceof ka};ta=function(a){return a.isComplex()||("function"===typeof a.isAssignable?a.isAssignable():void 0)};Na=function(a,b,c){if(a=b[c].unfoldSoak(a))return b[c]= +a.body,a.body=new L(b),a}}).call(this);return g}();u["./sourcemap"]=function(){var g={};(function(){var u;u=function(){function g(g){this.line=g;this.columns=[]}g.prototype.add=function(g,a,c){var q;q=a[0];a=a[1];null==c&&(c={});if(!this.columns[g]||!c.noReplace)return this.columns[g]={line:this.line,column:g,sourceLine:q,sourceColumn:a}};g.prototype.sourceLocation=function(g){for(var a;!((a=this.columns[g])||0>=g);)g--;return a&&[a.sourceLine,a.sourceColumn]};return g}();g=function(){function g(){this.lines= +[]}g.prototype.add=function(g,a,c){var q,f;null==c&&(c={});f=a[0];a=a[1];return((q=this.lines)[f]||(q[f]=new u(f))).add(a,g,c)};g.prototype.sourceLocation=function(g){var a,c;a=g[0];for(g=g[1];!((c=this.lines[a])||0>=a);)a--;return c&&c.sourceLocation(g)};g.prototype.generate=function(g,a){var c,q,f,u,l,w,G,F,x,v,M,N,J;null==g&&(g={});null==a&&(a=null);l=w=u=J=0;v=!1;c="";M=this.lines;f=q=0;for(G=M.length;qg?1:0);c||!a;)g=c&31,(c>>=5)&&(g|=32),a+=this.encodeBase64(g);return a};g.prototype.encodeBase64= +function(g){var a;if(!(a="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"[g]))throw Error("Cannot Base64 encode value: "+g);return a};return g}()}).call(this);return g}();u["./coffee-script"]=function(){var g={};(function(){var qa,xa,q,a,c,Ca,f,D,l,w,G,F,x,v,M,N,J,r,E,O={}.hasOwnProperty;D=u("fs");E=u("vm");M=u("path");qa=u("./lexer").Lexer;v=u("./parser").parser;w=u("./helpers");xa=u("./sourcemap");c=u("../../package.json");g.VERSION=c.version;g.FILE_EXTENSIONS=[".coffee",".litcoffee", +".coffee.md"];g.helpers=w;q=function(a){switch(!1){case "function"!==typeof Buffer:return(new Buffer(a)).toString("base64");case "function"!==typeof btoa:return btoa(encodeURIComponent(a).replace(/%([0-9A-F]{2})/g,function(a,c){return String.fromCharCode("0x"+c)}));default:throw Error("Unable to base64 encode inline 
sourcemap.");}};c=function(a){return function(c,f){null==f&&(f={});try{return a.call(this,c,f)}catch(p){if("string"!==typeof c)throw p;throw w.updateSyntaxError(p,c,f.filename);}}};r= +{};J={};g.compile=a=c(function(a,c){var f,g,h,k,m,l,u,D,E,I,F,G,z;h=w.extend;c=h({},c);l=c.sourceMap||c.inlineMap||null==c.filename;h=c.filename||"\x3canonymous\x3e";r[h]=a;l&&(I=new xa);g=x.tokenize(a,c);k=c;E=[];m=0;for(u=g.length;m", path_to) - if not os.path.isfile(path_from): - print("Missing file") - continue - - data = open(path_from, "rb").read() - - try: - open(path_to, 'wb').write(data) - num_ok += 1 - except Exception as err: try: - print("Error writing: %s. Renaming old file as workaround..." % err) - path_to_tmp = path_to + "-old" - if os.path.isfile(path_to_tmp): - os.unlink(path_to_tmp) - os.rename(path_to, path_to_tmp) - num_rename += 1 - open(path_to, 'wb').write(data) - shutil.copymode(path_to_tmp, path_to) # Copy permissions - print("Write done after rename!") - num_ok += 1 - except Exception as err: - print("Write error after rename: %s" % err) - num_error += 1 - print("* Updated files: %s, renamed: %s, error: %s" % (num_ok, num_rename, num_error)) + open(dest_path, 'wb').write(data) + except Exception, err: + print dest_path, err + + print "Done." + return True if __name__ == "__main__": + # Fix broken gevent SSL + import sys sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src")) # Imports relative to src + from Config import config + config.parse() + from src.util import SslPatch - update() + try: + update() + except Exception, err: + print "Update error: %s" % err + raw_input("Press enter to exit") diff --git a/zeronet.py b/zeronet.py index 457efb19..8fd07968 100755 --- a/zeronet.py +++ b/zeronet.py @@ -1,138 +1,84 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python2.7 + +# Included modules import os import sys def main(): - if sys.version_info.major < 3: - print("Error: Python 3.x is required") - sys.exit(0) - - if "--silent" not in sys.argv: - print("- Starting ZeroNet...") + print "- Starting ZeroNet..." 
main = None try: + app_dir = os.path.dirname(os.path.abspath(__file__)) + os.chdir(app_dir) # Change working dir to zeronet.py dir + sys.path.insert(0, os.path.join(app_dir, "src/lib")) # External library directory + sys.path.insert(0, os.path.join(app_dir, "src")) # Imports relative to src import main main.start() - except Exception as err: # Prevent closing + if main.update_after_shutdown: # Updater + import gc + import update + # Try to clean up OpenSSL + try: + if "lib.opensslVerify" in sys.modules: + sys.modules["lib.opensslVerify"].opensslVerify.closeLibrary() + except Exception, err: + print "Error closing opensslVerify lib", err + try: + if "lib.pyelliptic" in sys.modules: + sys.modules["lib.pyelliptic"].openssl.closeLibrary() + except Exception, err: + print "Error closing pyelliptic lib", err + + # Close lock file + sys.modules["main"].lock.close() + + # Update + update.update() + + # Close log files + logger = sys.modules["main"].logging.getLogger() + + for handler in logger.handlers[:]: + handler.flush() + handler.close() + logger.removeHandler(handler) + + except Exception, err: # Prevent closing import traceback try: import logging logging.exception("Unhandled exception: %s" % err) - except Exception as log_err: - print("Failed to log error:", log_err) + except Exception, log_err: + print "Failed to log error:", log_err traceback.print_exc() from Config import config - error_log_path = config.log_dir + "/error.log" - traceback.print_exc(file=open(error_log_path, "w")) - print("---") - print("Please report it: https://github.com/HelloZeroNet/ZeroNet/issues/new?assignees=&labels=&template=bug-report.md") - if sys.platform.startswith("win") and "python.exe" not in sys.executable: - displayErrorMessage(err, error_log_path) + traceback.print_exc(file=open(config.log_dir + "/error.log", "a")) - if main and (main.update_after_shutdown or main.restart_after_shutdown): # Updater - if main.update_after_shutdown: - print("Shutting down...") - prepareShutdown() - import update - print("Updating...") - update.update() - if main.restart_after_shutdown: - print("Restarting...") - restart() - else: - print("Shutting down...") - prepareShutdown() - print("Restarting...") - restart() + if main and main.update_after_shutdown: # Updater + # Restart + gc.collect() # Garbage collect + print "Restarting..." + import time + time.sleep(1) # Wait for files to close + args = sys.argv[:] + sys.executable = sys.executable.replace(".pkg", "") # Frozen Mac fix -def displayErrorMessage(err, error_log_path): - import ctypes - import urllib.parse - import subprocess + if not getattr(sys, 'frozen', False): + args.insert(0, sys.executable) - MB_YESNOCANCEL = 0x3 - MB_ICONEXCLAIMATION = 0x30 + if sys.platform == 'win32': + args = ['"%s"' % arg for arg in args] - ID_YES = 0x6 - ID_NO = 0x7 - ID_CANCEL = 0x2 - - err_message = "%s: %s" % (type(err).__name__, err) - err_title = "Unhandled exception: %s\nReport error?"
% err_message - - res = ctypes.windll.user32.MessageBoxW(0, err_title, "ZeroNet error", MB_YESNOCANCEL | MB_ICONEXCLAIMATION) - if res == ID_YES: - import webbrowser - report_url = "https://github.com/ZeroNetX/ZeroNet/issues/new?assignees=&labels=&template=bug-report.md&title=%s" - webbrowser.open(report_url % urllib.parse.quote("Unhandled exception: %s" % err_message)) - if res in [ID_YES, ID_NO]: - subprocess.Popen(['notepad.exe', error_log_path]) - -def prepareShutdown(): - import atexit - atexit._run_exitfuncs() - - # Close log files - if "main" in sys.modules: - logger = sys.modules["main"].logging.getLogger() - - for handler in logger.handlers[:]: - handler.flush() - handler.close() - logger.removeHandler(handler) - - import time - time.sleep(1) # Wait for files to close - -def restart(): - args = sys.argv[:] - - sys.executable = sys.executable.replace(".pkg", "") # Frozen Mac fix - - if not getattr(sys, 'frozen', False): - args.insert(0, sys.executable) - - # Don't open browser after restart - if "--open_browser" in args: - del args[args.index("--open_browser") + 1] # argument value - del args[args.index("--open_browser")] # argument key - - if getattr(sys, 'frozen', False): - pos_first_arg = 1 # Only the executable - else: - pos_first_arg = 2 # Interpreter, .py file path - - args.insert(pos_first_arg, "--open_browser") - args.insert(pos_first_arg + 1, "False") - - if sys.platform == 'win32': - args = ['"%s"' % arg for arg in args] - - try: - print("Executing %s %s" % (sys.executable, args)) - os.execv(sys.executable, args) - except Exception as err: - print("Execv error: %s" % err) - print("Bye.") - - -def start(): - app_dir = os.path.dirname(os.path.abspath(__file__)) - os.chdir(app_dir) # Change working dir to zeronet.py dir - sys.path.insert(0, os.path.join(app_dir, "src/lib")) # External library directory - sys.path.insert(0, os.path.join(app_dir, "src")) # Imports relative to src - - if "--update" in sys.argv: - sys.argv.remove("--update") - print("Updating...") - import update - update.update() - else: - main() + try: + print "Executing %s %s" % (sys.executable, args) + os.execv(sys.executable, args) + except Exception, err: + print "Execv error: %s" % err + print "Bye." if __name__ == '__main__': - start() + main()
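
Two techniques in the hunks above appear only as diff fragments, so here they are in standalone form. Both sketches are illustrative only: they are not ZeroNet's actual code, and the helper names (safe_overwrite, restart_via_execv) are invented for this illustration.

First, the removed update() code handles files that cannot be overwritten in place (e.g. a file locked by the running process on Windows): it renames the old file aside, writes the new content, then copies the old permission bits over. A minimal sketch of that write-with-rename fallback:

import os
import shutil

def safe_overwrite(path, data):
    # Hypothetical helper; mirrors the fallback in the removed update() hunk.
    try:
        open(path, "wb").write(data)
    except OSError:
        # Direct write failed (e.g. file in use): move the old file aside,
        # write fresh, then copy the old file's permission bits across.
        path_old = path + "-old"
        if os.path.isfile(path_old):
            os.unlink(path_old)
        os.rename(path, path_old)
        open(path, "wb").write(data)
        shutil.copymode(path_old, path)

Second, both versions of zeronet.py restart after an update by replacing the current process with a fresh interpreter via os.execv. A minimal sketch of that pattern:

import os
import sys

def restart_via_execv():
    # Hypothetical helper; mirrors the restart logic in both zeronet.py versions.
    args = sys.argv[:]
    # Outside a frozen build, argv[0] is the script path, so the interpreter
    # itself must be prepended before handing the vector to execv.
    if not getattr(sys, "frozen", False):
        args.insert(0, sys.executable)
    # Windows joins the argument list back into a single command line, so
    # each argument is quoted to survive embedded spaces.
    if sys.platform == "win32":
        args = ['"%s"' % arg for arg in args]
    os.execv(sys.executable, args)  # does not return on success

This also explains the ordering the hunks enforce: log handlers are flushed and closed and the lock file released before re-executing, since open file descriptors are inherited across execv on POSIX and a still-held lock would block the replacement instance.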